repo_id
stringlengths
6
101
size
int64
367
5.14M
file_path
stringlengths
2
269
content
stringlengths
367
5.14M
2833844911/cy_jsvmp
15,512
tool/jsvmp_out.js
function cltothis(cythis, poolList, off) { for (let i in poolList) { if (off == 1) { cythis[i] = undefined; } else { cythis[i] = poolList[i]; } } } function cbb_jsvmp(all, duei, start, shuz, argsList, ogg, op) { function getproto(s, d, e) { let dt = s; for (; 1 == 1;) { if (s.hasOwnProperty(d)) { try { s[d] = e; } catch (e2) { this[d] = e; return; } break; } else { s = s.__proto__; if (s == undefined || s == null) { window[d] = e; return; } } } } if (op !== undefined) { var allthis; allthis = op['allthis']; duei = op.duei; all = op.all; shuz = op.shuz; argsList = op.argsList; var a1, a2, a3, a4, a5, a6, a7, a8, a9; a7 = op.a7; var args = op.args; var cbbb = op.cbbb; } else { var allthis; if (ogg !== undefined) { allthis = ogg; } else { allthis = all; } var a1, a2, a3, a4, a5, a6, a7, a8, a9; var args = []; var cbbb = all; } while (!![]) { let s_cbb = shuz[start++]; if ([null, 551, 550, 291, 290, 252, 240, 200, 197, 195, 194, 192, 190, 181, 150, 105, 104, 90, 60, 58, 57, 56, 55, 54, 53, 52, 51, 50, 49, 48, 47, 46, 45, 44, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 20, 19, 11, 10, 8, 2, 1].indexOf(s_cbb) == -1) { return "-90_cbb"; } else if (s_cbb < 48) { if (s_cbb < 30) { if (s_cbb < 23) { if (s_cbb < 11) { if (s_cbb < 8) { if (s_cbb <= 1) { a8 = duei.length; for (a1 = 0; a1 < a8; a1++) { a7 = duei.pop(); let g = a7; all[g] = function () { let g2 = []; if (offnew == 1) { offnew = 0; a9 = { "variablePool": {}, "arguments": arguments, "zhili": [] }; a9.__proto__ = cbbb; cltothis(a9.variablePool, changlc[cbbb.variablePool[g]].variablePool); cltothis(a9, a9['variablePool'], 1); cltothis(a9['zhili'], changlc[cbbb.variablePool[g]].zhili); a6 = cbb_jsvmp(a9, g2, 0, a9['zhili'], arguments, this); } else { a9 = { "variablePool": {}, "arguments": arguments, "zhili": [] }; cltothis(a9['variablePool'], changlc[cbbb.variablePool[g]].variablePool); cltothis(a9, a9['variablePool'], 1); cltothis(a9['zhili'], changlc[cbbb.variablePool[g]].zhili); 
a9.__proto__ = cbbb; a6 = cbb_jsvmp(a9, g2, 0, a9['zhili'], arguments, this); } if (g2.length == 0) { return undefined; } else { let h = g2.pop(); for (; 1 == 1;) { if (g2.length == 0) { break; } else { g2.pop(); } } return h; } }; } } else { a1 = duei.length; for (a2 = 0; a2 < a1; a2++) { cbbb[duei.shift()] = argsList[a2]; } } } else if (s_cbb <= 8) { a1 = shuz[start++]; a2 = shuz[start++]; a1 = new RegExp(constantPool[a1], constantPool[a2]); duei.push(a1); } else { a1 = shuz[start++]; duei.push(constantPool[a1]); } } else if (s_cbb < 20) { if (s_cbb <= 11) { a1 = shuz[start++]; duei.push(a1); } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 - a2; duei.push(a1); } } else if (s_cbb <= 20) { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 + a2; duei.push(a1); } else { a1 = shuz[start++]; a2 = duei.pop(); a3 = duei.pop(); a2[constantPool[a1]] = a3; } } else if (s_cbb < 27) { if (s_cbb < 25) { if (s_cbb <= 23) { all = cbbb; duei.push(cbbb); } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 < a2; duei.push(a1); } } else if (s_cbb <= 25) { a1 = duei.pop(); a2 = shuz[start++]; if (!a1) { start += a2; } ; } else { a1 = duei.pop(); // a2 = duei.pop() a3 = shuz[start++]; a1[constantPool[a3]] += 1; } } else if (s_cbb < 29) { if (s_cbb <= 27) { a1 = duei.pop(); a2 = duei.pop(); a1 = a2 * a1; duei.push(a1); } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 % a2; duei.push(a1); } } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 ^ a2; duei.push(a1); } } else if (s_cbb < 38) { if (s_cbb < 34) { if (s_cbb < 32) { if (s_cbb <= 30) { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 / a2; duei.push(a1); } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 << a2; duei.push(a1); } } else if (s_cbb <= 32) { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 | a2; duei.push(a1); } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 >> a2; duei.push(a1); } } else if (s_cbb < 36) { if (s_cbb <= 34) { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 >>> a2; duei.push(a1); } else { a1 = duei.pop(); a2 = duei.pop(); a1 
= a1 & a2; duei.push(a1); } } else if (s_cbb <= 36) { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 <= a2; duei.push(a1); } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 >= a2; duei.push(a1); } } else if (s_cbb < 45) { if (s_cbb < 40) { if (s_cbb <= 38) { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 > a2; duei.push(a1); } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a2 == a1; duei.push(a1); } } else if (s_cbb <= 40) { a1 = duei.pop(); a2 = duei.pop(); a2.push(a1); duei.push(a2); } else { a5 = duei.pop(); duei.push(~a5); } } else if (s_cbb < 47) { if (s_cbb <= 45) { a1 = duei.pop(); a2 = duei.pop(); a3 = duei.pop(); // a3[a2] = a1; // getproto(a3,a2,a1) if (a3.variablePool != undefined) { getproto(a3, a2, a1); } else { a3[a2] = a1; } duei.push(a3); } else { a1 = shuz[start++]; a3 = duei.pop(); args = []; for (a2 = 0; a2 < a1; a2++) { args.splice(0, 0, duei.pop()); } offnew = 1; if (a3 == RegExp) { a4 = new RegExp(args[0], args[1]); } else { a4 = new a3(...args); } offnew = 0; duei.push(a4); } } else { duei.push(allthis); } } else if (s_cbb < 150) { if (s_cbb < 56) { if (s_cbb < 52) { if (s_cbb < 50) { if (s_cbb <= 48) { a1 = shuz[start++] * 2; a3 = []; a4 = []; for (a2 = 0; a2 < a1; a2++) { if (a2 < a1 / 2) { a3.splice(0, 0, duei.pop()); } else { a4.splice(0, 0, duei.pop()); } } a1 = duei.pop(); for (a2 = 0; a2 < a3.length; a2++) { if (a4[a2] == a1) { start += a3[a2]; break; } else if (a4[a2] == null) { start += a3[a2]; break; } } } else { a5 = duei.pop(); duei.push(typeof a5); } } else if (s_cbb <= 50) { a5 = duei.pop(); duei.push(-a5); } else { a1 = duei.pop(); a2 = shuz[start++]; if (!a1) { start += a2; duei.push(a1); } ; } } else if (s_cbb < 54) { if (s_cbb <= 52) { a1 = duei.pop(); // a2 = duei.pop() a3 = shuz[start++]; a1[constantPool[a3]] -= 1; } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a2 === a1; duei.push(a1); } } else if (s_cbb <= 54) { a1 = duei.pop(); a2 = duei.pop(); a1 = a2 !== a1; duei.push(a1); } else { a1 = duei.pop(); a2 = duei.pop(); a3 = delete 
a2[a1]; duei.push(a3); } } else if (s_cbb < 90) { if (s_cbb < 58) { if (s_cbb <= 56) { a5 = duei.pop(); duei.push(void a5); } else { let i = []; a2 = duei.pop(); for (a1 in a2) { i.push(a1); } cbbb['for_in_xh_cbb_list'] = i; } } else if (s_cbb <= 58) { a1 = duei.pop(); throw a1; } else { a5 = duei.pop(); duei.push(!a5); } } else if (s_cbb < 105) { if (s_cbb <= 90) { a1 = duei.pop(); a2 = duei.pop(); a3 = duei.pop(); // a3[a2] = a1; // getproto(a3,a2,a1) if (a3.variablePool != undefined) { getproto(a3, a2, a1); } else { a3[a2] = a1; } } else { duei.push({}); } } else { duei.push([]); } } else if (s_cbb < 200) { if (s_cbb < 194) { if (s_cbb < 190) { if (s_cbb <= 150) { a1 = shuz[start++]; a3 = duei.pop(); args = []; for (a2 = 0; a2 < a1; a2++) { args.splice(0, 0, duei.pop()); } if (a3 == window.setTimeout) { a4 = setTimeout(args[0]); } else if (a3 == window.atob) { a4 = atob(...args); } else if (a3 == window.RegExp) { a4 = RegExp(...args); } else { a4 = a3.apply(all, args); } duei.push(a4); } else { a1 = duei.pop(); a2 = duei.pop(); try { a1 = a2[a1]; } catch (e) { a1 = window[a1]; } all = a2; duei.push(a1); } } else if (s_cbb <= 190) { a1 = shuz[start++]; start += a1; } else { a1 = duei.pop(); a3 = shuz[start++]; if (a1) { start += a3; } } } else if (s_cbb < 197) { if (s_cbb <= 194) { debugger; } else { a2 = shuz[start++]; a3 = shuz[start++]; a4 = shuz[start++]; try { a6 = cbb_jsvmp(a3, start, start, duei, args.length, 1, { "shuz": shuz, "cbbb": cbbb, "allthis": allthis, "argsList": argsList, "args": args, "duei": duei, "all": all, "a7": a7 }); start = a2 + start; if (a6 == "-90_cbb") { return a6; } } catch (e) { a7 = e; start = a2 + start; a6 = cbb_jsvmp(a1, start, start, duei, args.length, 1, { "shuz": shuz, "cbbb": cbbb, "allthis": allthis, "args": args, "argsList": argsList, "duei": duei, "all": all, "a7": a7 }); if (a6 == "-90_cbb") { return a6; } } finally { if (a6 == "-90_cbb") { return a6; } start = a3 + start; a6 = cbb_jsvmp(a3, start, start, duei, 
args.length, 1, { "shuz": shuz, "cbbb": cbbb, "allthis": allthis, "argsList": argsList, "args": args, "duei": duei, "all": all, "a7": a7 }); if (a6 == "-90_cbb") { return a6; } start = start + a4; } } } else { let j = duei.pop(); let j2 = duei.pop(); // j2[j] = a1 // getproto(j2,j,a1) if (j2.variablePool != undefined) { getproto(j2, j, a7); } else { j2[j] = a1; } } } else if (s_cbb < 291) { if (s_cbb < 252) { if (s_cbb <= 200) { return; } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a2 < a1; duei.push(a1); } } else if (s_cbb <= 252) { a1 = duei.pop(); a2 = shuz[start++]; if (a1) { start += a2; duei.push(a1); } ; } else { a1 = duei.pop(); a2 = duei.pop(); a3 = duei.pop(); // a3[a2] = a1; // getproto(a2,a1,a3) if (a2.variablePool != undefined) { getproto(a3, a2, a1); } else { a3[a2] = a1; } } } else if (s_cbb < 551) { if (s_cbb <= 291) { a1 = duei.pop(); a2 = duei.pop(); a1 = a2 - a1; duei.push(a1); } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a2 != a1; duei.push(a1); } } else { a1 = duei.pop(); a2 = duei.pop(); a1 = a1 in a2; duei.push(a1); } } } if (!this.window) { global._fetch = global.fetch; }else{ window._fetch = window.fetch; } var ruc = this; this.offnew = 0; ruc['variablePool'] = {}; ruc['zhili'] = []; cltothis(ruc['variablePool'], changlc.awcbb_yhh_fun0.variablePool); cltothis(ruc['zhili'], changlc.awcbb_yhh_fun0.zhili); cbb_jsvmp(ruc, [], 0, changlc.awcbb_yhh_fun0.zhili);
2833844911/cy_jsvmp
2,786
tool/rename.js
const parser = require("@babel/parser"); const traverse = require("@babel/traverse").default; const tee = require("@babel/types"); const fs = require("fs"); const generator = require("@babel/generator").default; function renameCj(ast){ var offchaConsole = 0 var remanet = 1 var neNum = 0 var cbbfunj = 0 var parseInfo = { VariableDeclarator(path){ if (remanet === 0){ return } var d = path.get("id") d.scope.rename(d.node.name,"c_f_"+neNum) neNum += 1 }, CatchClause(path){ if (remanet === 0){ return } var d = path.get("param") if (d.node != null){ d.scope.rename(d.node.name,"c_f_"+neNum) neNum += 1 } }, FunctionDeclaration(path){ if (offchaConsole !== 0){ path.node.body.body.splice(0,0,tee.callExpression( tee.memberExpression(tee.identifier("console"),tee.identifier("log")), [tee.stringLiteral("cbbfun"+cbbfunj)] )) cbbfunj += 1 } if (remanet === 0){ return } var d = path.get("id") if (d.node != null && d.node.name.indexOf("cbb_")!==0){ d.scope.rename(d.node.name,"c_f_"+neNum) neNum += 1 }else{ d.scope.rename(d.node.name,"cbb_"+neNum) neNum += 1 } var d = path.node.params var k =path.get("param") for (let i =0; i< d.length; i++){ k.scope.rename(d[i].name,"c_f_"+neNum) neNum += 1 } }, FunctionExpression(path){ if (offchaConsole !== 0) { path.node.body.body.splice(0, 0, tee.callExpression( tee.memberExpression(tee.identifier("console"), tee.identifier("log")), [tee.stringLiteral("cbbfun" + cbbfunj)] )) cbbfunj += 1 } if (remanet === 0){ return } var d = path.get("id") if (d.node != null){ d.scope.rename(d.node.name,"c_f_"+neNum) neNum += 1 } var d = path.node.params var k =path.get("param") for (let i =0; i< d.length; i++){ k.scope.rename(d[i].name,"c_f_"+neNum) neNum += 1 } } } traverse(ast, parseInfo) var f = generator(ast).code; fs.writeFileSync("./outsrc/out2.js", f, (e)=>{}) return ast } exports.renameCj = renameCj;
27182812/ChatGLM-LLaMA-chinese-insturct
5,729
src/transformers/models/gpt_neox_japanese/configuration_gpt_neox_japanese.py
# coding=utf-8 # Copyright 2022 ABEJA, Inc. and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ GPTNeoX Japanese model configuration""" from ...configuration_utils import PretrainedConfig from ...utils import logging logger = logging.get_logger(__name__) GPT_NEOX_JAPANESE_PRETRAINED_CONFIG_ARCHIVE_MAP = { "abeja/gpt-neox-japanese-2.7b": "https://huggingface.co/abeja/gpt-neox-japanese-2.7b/resolve/main/config.json", } class GPTNeoXJapaneseConfig(PretrainedConfig): r""" This is the configuration class to store the configuration of a [`GPTNeoXModelJapanese`]. It is used to instantiate a GPTNeoX model according to the specified arguments, defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of the GPTNeoXJapanese [abeja/gpt-neox-japanese-2.7b](https://huggingface.co/abeja/gpt-neox-japanese-2.7b) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Default configs is set as 2.7B model Args: vocab_size (`int`, *optional*, defaults to 32000): Vocabulary size of the GPTNeoXJapanese model. Defines the number of different tokens that can be represented by the `inputs_ids` passed when calling [`GPTNeoXJapanese`]. hidden_size (`int`, *optional*, defaults to 2560): Dimension of the encoder layers and the pooler layer. 
num_hidden_layers (`int`, *optional*, defaults to 32): Number of hidden layers in the Transformer encoder. num_attention_heads (`int`, *optional*, defaults to 32): Number of attention heads for each attention layer in the Transformer encoder. intermediate_multiple_size (`int`, *optional*, defaults to 4): Dimension of the "intermediate" layer in the Transformer encoder is calculated by hidden_size * intermediate_multiple_size. hidden_act (`str` or `function`, *optional*, defaults to `"gelu"`): The non-linear activation function (function or string) in the encoder and pooler. rotary_pct (`float`, *optional*, defaults to 1.00): percentage of hidden dimensions to allocate to rotary embeddings rotary_emb_base (`int`, *optional*, defaults to 10000) base for computing rotary embeddings frequency max_position_embeddings (`int`, *optional*, defaults to 2048): The maximum sequence length that this model might ever be used with. initializer_range (`float`, *optional*, defaults to 0.02): The standard deviation of the truncated_normal_initializer for initializing all weight matrices. layer_norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon used by the layer normalization layers. use_cache (`bool`, *optional*, defaults to `True`): Whether or not the model should return the last key/values attentions (not used by all models). Only relevant if `config.is_decoder=True`. attention_dropout (`float`, *optional*, defaults to 0.1): The dropout ratio for the attention. hidden_dropout (`float`, *optional*, defaults to 0.0): The dropout ratio for the hidden layer. 
Example: ```python >>> from transformers import GPTNeoXJapaneseConfig, GPTNeoXJapaneseModel >>> # Initializing a GPTNeoXJapanese gpt-neox-japanese-2.7b style configuration >>> configuration = GPTNeoXJapaneseConfig() >>> # Initializing a model (with random weights) from the gpt-neox-japanese-2.7b style configuration >>> model = GPTNeoXJapaneseModel(configuration) >>> # Accessing the model configuration >>> configuration = model.config ```""" model_type = "gpt_neox_japanese" def __init__( self, vocab_size=32000, hidden_size=2560, num_hidden_layers=32, num_attention_heads=32, intermediate_multiple_size=4, hidden_act="gelu", rotary_pct=1.00, rotary_emb_base=10000, max_position_embeddings=2048, initializer_range=0.02, layer_norm_eps=1e-5, use_cache=True, bos_token_id=31996, eos_token_id=31999, attention_dropout=0.1, hidden_dropout=0.0, **kwargs, ): super().__init__(bos_token_id=bos_token_id, eos_token_id=eos_token_id, **kwargs) self.vocab_size = vocab_size self.max_position_embeddings = max_position_embeddings self.hidden_size = hidden_size self.num_hidden_layers = num_hidden_layers self.num_attention_heads = num_attention_heads self.intermediate_multiple_size = intermediate_multiple_size self.hidden_act = hidden_act self.rotary_pct = rotary_pct self.rotary_emb_base = rotary_emb_base self.initializer_range = initializer_range self.layer_norm_eps = layer_norm_eps self.use_cache = use_cache self.attention_dropout = attention_dropout self.hidden_dropout = hidden_dropout
2833844911/gojsvmp
2,441
evaluator/eval_test.go
package evaluator import ( "fmt" "testing" ) func TestCyJS(t *testing.T) { code := ` function goTonr(){ this.headers = { "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7", "accept-language": "zh-CN,zh;q=0.9", "cache-control": "no-cache", "pragma": "no-cache", "priority": "u=0, i", "referer": "https://travel.qunar.com/search/gonglue/22-shanghai-299878/hot_heat/3.htm", "^sec-ch-ua": "^\\^Google", "sec-ch-ua-mobile": "?0", "^sec-ch-ua-platform": "^\\^Windows^^^", "sec-fetch-dest": "document", "sec-fetch-mode": "navigate", "sec-fetch-site": "same-origin", "sec-fetch-user": "?1", "upgrade-insecure-requests": "1", "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36" } this.getPageInfo = function (url){ console.log("开始异步Promise请求",url) var d = new Promise(function(){ req = cyhttp.get(url,{ "headers": this.headers, "timeout": 30, // "proxies":"http://127.0.0.1:8888" }) cyout(req.text) // cbb_a传递到then的第一个函数 || cbb_b传递到then的第二个函数 cbb_a(req.text) }) d.then(function (text){ parseHTML = etree.HTML(text) title = parseHTML.xpath('//ul[@class="b_strategy_list "]/li//h2') urlList = parseHTML.xpath('//ul[@class="b_strategy_list "]/li//h2/a/@href') var b = fs.open("./a.csv",{"ms":"a"}) for (var i=0 ; i<title.length; i++){ b.write( [title[i].xpath('.//text()').join(" "), "https://travel.qunar.com"+urlList[i]].join(",")+"\n") console.log(title[i].xpath('.//text()').join(" ")) } b.close() console.log("结束异步请求") }) } } var b = fs.open("./a.csv",{"ms":"w"}) b.write(["标题","链接"].join(",")+"\n") b.close() var ff = new goTonr() for (var i =1; i<10; i++){ st = Date.now() ff.getPageInfo("https://travel.qunar.com/search/gonglue/22-shanghai-299878/hot_heat/"+i+".htm") // 等待前面异步操作结束 wait() cyout("请求时间", Date.now() - st, "ms") // 休眠10000ms debugger Date.sleep(10000) } ` ddd := Eval(code) fmt.Println("程序结束", ddd) }
2833844911/gojsvmp
7,257
ast/ast.go
package ast import ( "myvmp/token" ) // The base Node interface type Node interface { toString() } type Statement interface { Node StatementNode() string } type Program struct { Body []*Statement TypeInfo string } func (nn *Program) StatementNode() string { nn.TypeInfo = token.Prog return token.Prog } func (nn *Program) toString() { } type Identifier struct { Name string PAIX int TypeInfo string } func (nn *Identifier) StatementNode() string { nn.TypeInfo = token.IDENT return token.IDENT } func (nn *Identifier) toString() { } type NullIdentifier struct { Value string PAIX int TypeInfo string } func (nn *NullIdentifier) StatementNode() string { nn.TypeInfo = token.NULL return token.NULL } func (nn *NullIdentifier) toString() { } type VariableDeclaration struct { Token string Declarations []*Statement Init Statement PAIX int TypeInfo string } func (nn *VariableDeclaration) StatementNode() string { nn.TypeInfo = token.VAR return token.VAR } func (nn *VariableDeclaration) toString() { } type NumericLiteral struct { Value float64 PAIX int TypeInfo string } func (nn *NumericLiteral) StatementNode() string { nn.TypeInfo = token.INT return token.INT } func (nn *NumericLiteral) toString() { } type BinaryExpression struct { Left Statement Right Statement Operator string PAIX int TypeInfo string } func (nn *BinaryExpression) StatementNode() string { nn.TypeInfo = token.Bin return token.Bin } func (nn *BinaryExpression) toString() { } type NOP struct { TypeInfo string } func (nn *NOP) StatementNode() string { nn.TypeInfo = token.NOP return token.NOP } func (nn *NOP) toString() { } type OVER struct { TypeInfo string } func (nn *OVER) StatementNode() string { //nn.TypeInfo = token.OVER return nn.TypeInfo } func (nn *OVER) toString() { } type AssignmentExpression struct { Left Statement Right Statement Operator string PAIX int TypeInfo string } func (nn *AssignmentExpression) StatementNode() string { nn.TypeInfo = token.Ass return token.Ass } func (nn *AssignmentExpression) 
toString() { } type CallExpression struct { Caller Statement Arguments []*Statement PAIX int TypeInfo string } func (nn *CallExpression) StatementNode() string { nn.TypeInfo = token.Call return token.Call } func (nn *CallExpression) toString() { } type IfStatement struct { Test Statement Consequent Statement Alternate Statement PAIX int TypeInfo string } func (nn *IfStatement) StatementNode() string { nn.TypeInfo = token.IfStat return token.IfStat } func (nn *IfStatement) toString() { } type BlockStatement struct { Body []*Statement PAIX int TypeInfo string } func (nn *BlockStatement) StatementNode() string { nn.TypeInfo = token.Block return token.Block } func (nn *BlockStatement) toString() { } type UnaryExpression struct { Argument Statement Prefix bool Operator string PAIX int TypeInfo string } func (nn *UnaryExpression) StatementNode() string { nn.TypeInfo = token.Unary return token.Unary } func (nn *UnaryExpression) toString() { } type FunctionDeclaration struct { Id Statement Params []*Statement Body Statement PAIX int TypeInfo string } func (nn *FunctionDeclaration) StatementNode() string { nn.TypeInfo = token.FuncD return token.FuncD } func (nn *FunctionDeclaration) toString() { } type FunctionExpression struct { Id Statement Params []*Statement Body Statement PAIX int TypeInfo string } func (nn *FunctionExpression) StatementNode() string { nn.TypeInfo = token.FuncE return token.FuncE } func (nn *FunctionExpression) toString() { } type MemberExpression struct { Object Statement Property Statement PAIX int TypeInfo string } func (nn *MemberExpression) StatementNode() string { nn.TypeInfo = token.Member return token.Member } func (nn *MemberExpression) toString() { } type StringLiteral struct { Value string PAIX int TypeInfo string } func (nn *StringLiteral) StatementNode() string { nn.TypeInfo = token.Stri return token.Stri } func (nn *StringLiteral) toString() { } type ThisExpression struct { PAIX int TypeInfo string } func (nn *ThisExpression) 
StatementNode() string { nn.TypeInfo = token.THIS return token.THIS } func (nn *ThisExpression) toString() { } type BreakStatement struct { PAIX int TypeInfo string } func (nn *BreakStatement) StatementNode() string { nn.TypeInfo = token.BREAK return token.BREAK } func (nn *BreakStatement) toString() { } type ContinueStatement struct { PAIX int TypeInfo string } func (nn *ContinueStatement) StatementNode() string { nn.TypeInfo = token.CONTINUE return token.CONTINUE } func (nn *ContinueStatement) toString() { } type ForStatement struct { Init Statement Test Statement Updata Statement Body Statement PAIX int TypeInfo string } func (nn *ForStatement) StatementNode() string { nn.TypeInfo = token.ForS return token.ForS } func (nn *ForStatement) toString() { } type ForInStatement struct { Left Statement Right Statement Body Statement PAIX int TypeInfo string } func (nn *ForInStatement) StatementNode() string { nn.TypeInfo = token.ForI return token.ForI } func (nn *ForInStatement) toString() { } type ArrayExpression struct { Elements []*Statement PAIX int TypeInfo string } func (nn *ArrayExpression) StatementNode() string { nn.TypeInfo = token.ArrayE return token.ArrayE } func (nn *ArrayExpression) toString() { } type ObjectExpression struct { Properties []*Statement PAIX int TypeInfo string } func (nn *ObjectExpression) StatementNode() string { nn.TypeInfo = token.Object return token.Object } func (nn *ObjectExpression) toString() { } type Property struct { Key Statement Value Statement PAIX int TypeInfo string } func (nn *Property) StatementNode() string { nn.TypeInfo = token.Prop return token.Prop } func (nn *Property) toString() { } type NewExpression struct { Callee Statement Arguments []*Statement PAIX int TypeInfo string } func (nn *NewExpression) StatementNode() string { nn.TypeInfo = token.NEW return token.NEW } func (nn *NewExpression) toString() { } type ReturnStatement struct { Argument Statement PAIX int TypeInfo string } func (nn *ReturnStatement) 
StatementNode() string { nn.TypeInfo = token.RETURN return token.RETURN } func (nn *ReturnStatement) toString() { } type DebugStatement struct { PAIX int TypeInfo string } func (nn *DebugStatement) StatementNode() string { nn.TypeInfo = token.Debug return token.Debug } func (nn *DebugStatement) toString() { } type TryStatement struct { Block Statement Handler Statement PAIX int TypeInfo string } func (nn *TryStatement) StatementNode() string { nn.TypeInfo = token.TRY return token.TRY } func (nn *TryStatement) toString() { } type CatchClause struct { Param Statement Body Statement PAIX int TypeInfo string } func (nn *CatchClause) StatementNode() string { nn.TypeInfo = token.CATCH return token.CATCH } func (nn *CatchClause) toString() { }
2833844911/gojsvmp
2,252
etree/etree.go
package etree import ( "fmt" "github.com/antchfx/htmlquery" "golang.org/x/net/html" "myvmp/object" "myvmp/token" "strings" ) func new_Func(ddd *func(*object.FunctionDeclarationObject) object.Object, typeS string, Obj any) object.Object { d := &object.FunctionDeclarationObject{IsNative: 1, NativeBody: ddd, BindType: typeS, BindOb: Obj} return d } func Etree_gethtml(myfun *object.FunctionDeclarationObject) object.Object { doc := myfun.BindOb.(*html.Node) ggg := htmlquery.OutputHTML(doc, true) return &object.StringObject{Value: ggg} } func Etree_xpath(myfun *object.FunctionDeclarationObject) object.Object { Etree := myfun.BindOb.(*html.Node) dtt := strings.TrimSpace((*myfun.Args[0]).(*object.StringObject).Value) gettext := 0 if len(dtt) >= 7 && dtt[len(dtt)-7:] == "/text()" { gettext = 1 } else { ddkk := strings.Split(dtt, "/") dc := ddkk[len(ddkk)-1][0:1] if dc == "@" { gettext = 1 } } NodeList := htmlquery.Find(Etree, dtt) dss := object.NewArray() for _, ss := range NodeList { if gettext == 0 { parseHtml := object.NewEnv(nil) parseHtml.TypeInfo = token.THIS etreeXpath := Etree_xpath etreeXpath2 := new_Func(&etreeXpath, token.Etree, ss) parseHtml.Store.Set(token.Etree_xpath, etreeXpath2) etree_gethtml := Etree_gethtml etree_gethtml2 := new_Func(&etree_gethtml, token.Etree, ss) parseHtml.Store.Set(token.Etree_gethtml, etree_gethtml2) var hh object.Object = parseHtml dss.Value = append(dss.Value, &hh) } else { ddd := htmlquery.InnerText(ss) h := &object.StringObject{Value: ddd} var hh object.Object = h dss.Value = append(dss.Value, &hh) } } return &dss } func Etree_HTML(myfun *object.FunctionDeclarationObject) object.Object { html := (*myfun.Args[0]).ToString() doc, err := htmlquery.Parse(strings.NewReader(html)) if err != nil { fmt.Println("Error parsing HTML:", err) return nil } parseHtml := object.NewEnv(nil) etreeXpath := Etree_xpath etreeXpath2 := new_Func(&etreeXpath, token.Etree, doc) parseHtml.Store.Set(token.Etree_xpath, etreeXpath2) etree_gethtml := 
Etree_gethtml etree_gethtml2 := new_Func(&etree_gethtml, token.Etree, doc) parseHtml.Store.Set(token.Etree_gethtml, etree_gethtml2) return parseHtml }
2833844911/cy_jsvmp
250,554
jiaquban/gujia.js
function cshduei(){this.cf=function(){var b;if(!this.gx[this.s][0]){debugger;this.gx[this.s][0]="yhhw"+this.num;this.gx["yhhw"+this.num]=[undefined,this.s,undefined];this.num+=1;}return b=this.gx[this.s][2],this.s=this.gx[this.s][0],this.CFf-=1,b;};this.cF=function(b){var e=this.gx[this.s][1];if(!e){debugger;this.gx["yhh"+this.num]=[this.s,undefined,undefined],e="yhh"+this.num,this.num+=1;}return this.s=e,this.gx[this.s][2]=b,this.CFf+=1,b;};this.sf=function(){var b;if(this.CFf<1){debugger;return 10;}return b=this.gx[this.s][2],this.s=this.gx[this.s][0],this.CFf--,this.s=this.gx[this.s][1],this.gx[this.s][2]=b,this.CFf+=1,101;};this.Cf=function(){var b,e,c=this.s;this.CFf-=1;this.s=this.gx[this.s][0];e=this.gx[c][2];while(!![]){if(c===this.gx["cbb1"][1]){debugger;break;}b=this.gx[this.gx[c][0]][2],this.gx[this.gx[c][0]][2]=e,e=b,c=this.gx[c][0];}return e;};this.gx={cbb1:[undefined,"cbb760",undefined],cbb760:["cbb1","cbb637",undefined],cbb637:["cbb760","cbb997",undefined],cbb997:["cbb637","cbb70",undefined],cbb70:["cbb997","cbb737",undefined],cbb737:["cbb70","cbb888",undefined],cbb888:["cbb737","cbb284",undefined],cbb284:["cbb888","cbb739",undefined],cbb739:["cbb284","cbb403",undefined],cbb403:["cbb739","cbb143",undefined],cbb143:["cbb403","cbb962",undefined],cbb962:["cbb143","cbb434",undefined],cbb434:["cbb962","cbb305",undefined],cbb305:["cbb434","cbb983",undefined],cbb983:["cbb305","cbb499",undefined],cbb499:["cbb983","cbb845",undefined],cbb845:["cbb499","cbb99",undefined],cbb99:["cbb845","cbb355",undefined],cbb355:["cbb99","cbb480",undefined],cbb480:["cbb355","cbb264",undefined],cbb264:["cbb480","cbb990",undefined],cbb990:["cbb264","cbb779",undefined],cbb779:["cbb990","cbb897",undefined],cbb897:["cbb779","cbb851",undefined],cbb851:["cbb897","cbb967",undefined],cbb967:["cbb851","cbb504",undefined],cbb504:["cbb967","cbb685",undefined],cbb685:["cbb504","cbb8",undefined],cbb8:["cbb685","cbb847",undefined],cbb847:["cbb8","cbb557",undefined],cbb557:["cbb847","cbb212",u
ndefined],cbb212:["cbb557","cbb203",undefined],cbb203:["cbb212","cbb960",undefined],cbb960:["cbb203","cbb22",undefined],cbb22:["cbb960","cbb168",undefined],cbb168:["cbb22","cbb702",undefined],cbb702:["cbb168","cbb166",undefined],cbb166:["cbb702","cbb492",undefined],cbb492:["cbb166","cbb223",undefined],cbb223:["cbb492","cbb415",undefined],cbb415:["cbb223","cbb916",undefined],cbb916:["cbb415","cbb433",undefined],cbb433:["cbb916","cbb452",undefined],cbb452:["cbb433","cbb316",undefined],cbb316:["cbb452","cbb109",undefined],cbb109:["cbb316","cbb118",undefined],cbb118:["cbb109","cbb361",undefined],cbb361:["cbb118","cbb200",undefined],cbb200:["cbb361","cbb48",undefined],cbb48:["cbb200","cbb176",undefined],cbb176:["cbb48","cbb357",undefined],cbb357:["cbb176","cbb76",undefined],cbb76:["cbb357","cbb356",undefined],cbb356:["cbb76","cbb634",undefined],cbb634:["cbb356","cbb910",undefined],cbb910:["cbb634","cbb591",undefined],cbb591:["cbb910","cbb508",undefined],cbb508:["cbb591","cbb478",undefined],cbb478:["cbb508","cbb554",undefined],cbb554:["cbb478","cbb441",undefined],cbb441:["cbb554","cbb283",undefined],cbb283:["cbb441","cbb981",undefined],cbb981:["cbb283","cbb129",undefined],cbb129:["cbb981","cbb770",undefined],cbb770:["cbb129","cbb127",undefined],cbb127:["cbb770","cbb855",undefined],cbb855:["cbb127","cbb666",undefined],cbb666:["cbb855","cbb149",undefined],cbb149:["cbb666","cbb112",undefined],cbb112:["cbb149","cbb208",undefined],cbb208:["cbb112","cbb592",undefined],cbb592:["cbb208","cbb268",undefined],cbb268:["cbb592","cbb391",undefined],cbb391:["cbb268","cbb151",undefined],cbb151:["cbb391","cbb576",undefined],cbb576:["cbb151","cbb610",undefined],cbb610:["cbb576","cbb108",undefined],cbb108:["cbb610","cbb565",undefined],cbb565:["cbb108","cbb695",undefined],cbb695:["cbb565","cbb718",undefined],cbb718:["cbb695","cbb241",undefined],cbb241:["cbb718","cbb590",undefined],cbb590:["cbb241","cbb336",undefined],cbb336:["cbb590","cbb211",undefined],cbb211:["cbb336","cbb35",undefined],cb
b35:["cbb211","cbb638",undefined],cbb638:["cbb35","cbb80",undefined],cbb80:["cbb638","cbb299",undefined],cbb299:["cbb80","cbb829",undefined],cbb829:["cbb299","cbb436",undefined],cbb436:["cbb829","cbb327",undefined],cbb327:["cbb436","cbb399",undefined],cbb399:["cbb327","cbb59",undefined],cbb59:["cbb399","cbb606",undefined],cbb606:["cbb59","cbb398",undefined],cbb398:["cbb606","cbb738",undefined],cbb738:["cbb398","cbb533",undefined],cbb533:["cbb738","cbb429",undefined],cbb429:["cbb533","cbb755",undefined],cbb755:["cbb429","cbb872",undefined],cbb872:["cbb755","cbb838",undefined],cbb838:["cbb872","cbb566",undefined],cbb566:["cbb838","cbb135",undefined],cbb135:["cbb566","cbb764",undefined],cbb764:["cbb135","cbb6",undefined],cbb6:["cbb764","cbb545",undefined],cbb545:["cbb6","cbb494",undefined],cbb494:["cbb545","cbb687",undefined],cbb687:["cbb494","cbb556",undefined],cbb556:["cbb687","cbb234",undefined],cbb234:["cbb556","cbb555",undefined],cbb555:["cbb234","cbb178",undefined],cbb178:["cbb555","cbb751",undefined],cbb751:["cbb178","cbb171",undefined],cbb171:["cbb751","cbb404",undefined],cbb404:["cbb171","cbb228",undefined],cbb228:["cbb404","cbb98",undefined],cbb98:["cbb228","cbb102",undefined],cbb102:["cbb98","cbb38",undefined],cbb38:["cbb102","cbb773",undefined],cbb773:["cbb38","cbb387",undefined],cbb387:["cbb773","cbb258",undefined],cbb258:["cbb387","cbb708",undefined],cbb708:["cbb258","cbb448",undefined],cbb448:["cbb708","cbb853",undefined],cbb853:["cbb448","cbb231",undefined],cbb231:["cbb853","cbb932",undefined],cbb932:["cbb231","cbb538",undefined],cbb538:["cbb932","cbb683",undefined],cbb683:["cbb538","cbb931",undefined],cbb931:["cbb683","cbb994",undefined],cbb994:["cbb931","cbb856",undefined],cbb856:["cbb994","cbb747",undefined],cbb747:["cbb856","cbb629",undefined],cbb629:["cbb747","cbb495",undefined],cbb495:["cbb629","cbb440",undefined],cbb440:["cbb495","cbb236",undefined],cbb236:["cbb440","cbb315",undefined],cbb315:["cbb236","cbb367",undefined],cbb367:["cbb315","cbb969
",undefined],cbb969:["cbb367","cbb40",undefined],cbb40:["cbb969","cbb813",undefined],cbb813:["cbb40","cbb534",undefined],cbb534:["cbb813","cbb870",undefined],cbb870:["cbb534","cbb985",undefined],cbb985:["cbb870","cbb373",undefined],cbb373:["cbb985","cbb10",undefined],cbb10:["cbb373","cbb706",undefined],cbb706:["cbb10","cbb107",undefined],cbb107:["cbb706","cbb965",undefined],cbb965:["cbb107","cbb450",undefined],cbb450:["cbb965","cbb202",undefined],cbb202:["cbb450","cbb882",undefined],cbb882:["cbb202","cbb954",undefined],cbb954:["cbb882","cbb317",undefined],cbb317:["cbb954","cbb349",undefined],cbb349:["cbb317","cbb705",undefined],cbb705:["cbb349","cbb647",undefined],cbb647:["cbb705","cbb64",undefined],cbb64:["cbb647","cbb311",undefined],cbb311:["cbb64","cbb649",undefined],cbb649:["cbb311","cbb259",undefined],cbb259:["cbb649","cbb134",undefined],cbb134:["cbb259","cbb11",undefined],cbb11:["cbb134","cbb652",undefined],cbb652:["cbb11","cbb358",undefined],cbb358:["cbb652","cbb621",undefined],cbb621:["cbb358","cbb89",undefined],cbb89:["cbb621","cbb887",undefined],cbb887:["cbb89","cbb451",undefined],cbb451:["cbb887","cbb505",undefined],cbb505:["cbb451","cbb630",undefined],cbb630:["cbb505","cbb775",undefined],cbb775:["cbb630","cbb69",undefined],cbb69:["cbb775","cbb849",undefined],cbb849:["cbb69","cbb130",undefined],cbb130:["cbb849","cbb631",undefined],cbb631:["cbb130","cbb167",undefined],cbb167:["cbb631","cbb642",undefined],cbb642:["cbb167","cbb52",undefined],cbb52:["cbb642","cbb121",undefined],cbb121:["cbb52","cbb340",undefined],cbb340:["cbb121","cbb612",undefined],cbb612:["cbb340","cbb939",undefined],cbb939:["cbb612","cbb92",undefined],cbb92:["cbb939","cbb335",undefined],cbb335:["cbb92","cbb709",undefined],cbb709:["cbb335","cbb676",undefined],cbb676:["cbb709","cbb289",undefined],cbb289:["cbb676","cbb444",undefined],cbb444:["cbb289","cbb531",undefined],cbb531:["cbb444","cbb524",undefined],cbb524:["cbb531","cbb974",undefined],cbb974:["cbb524","cbb588",undefined],cbb588:["cbb9
74","cbb935",undefined],cbb935:["cbb588","cbb840",undefined],cbb840:["cbb935","cbb122",undefined],cbb122:["cbb840","cbb141",undefined],cbb141:["cbb122","cbb12",undefined],cbb12:["cbb141","cbb244",undefined],cbb244:["cbb12","cbb61",undefined],cbb61:["cbb244","cbb790",undefined],cbb790:["cbb61","cbb660",undefined],cbb660:["cbb790","cbb547",undefined],cbb547:["cbb660","cbb869",undefined],cbb869:["cbb547","cbb181",undefined],cbb181:["cbb869","cbb293",undefined],cbb293:["cbb181","cbb772",undefined],cbb772:["cbb293","cbb529",undefined],cbb529:["cbb772","cbb199",undefined],cbb199:["cbb529","cbb47",undefined],cbb47:["cbb199","cbb648",undefined],cbb648:["cbb47","cbb155",undefined],cbb155:["cbb648","cbb185",undefined],cbb185:["cbb155","cbb672",undefined],cbb672:["cbb185","cbb301",undefined],cbb301:["cbb672","cbb771",undefined],cbb771:["cbb301","cbb126",undefined],cbb126:["cbb771","cbb469",undefined],cbb469:["cbb126","cbb197",undefined],cbb197:["cbb469","cbb537",undefined],cbb537:["cbb197","cbb457",undefined],cbb457:["cbb537","cbb874",undefined],cbb874:["cbb457","cbb805",undefined],cbb805:["cbb874","cbb788",undefined],cbb788:["cbb805","cbb793",undefined],cbb793:["cbb788","cbb578",undefined],cbb578:["cbb793","cbb725",undefined],cbb725:["cbb578","cbb172",undefined],cbb172:["cbb725","cbb507",undefined],cbb507:["cbb172","cbb781",undefined],cbb781:["cbb507","cbb490",undefined],cbb490:["cbb781","cbb938",undefined],cbb938:["cbb490","cbb157",undefined],cbb157:["cbb938","cbb665",undefined],cbb665:["cbb157","cbb599",undefined],cbb599:["cbb665","cbb503",undefined],cbb503:["cbb599","cbb432",undefined],cbb432:["cbb503","cbb374",undefined],cbb374:["cbb432","cbb180",undefined],cbb180:["cbb374","cbb513",undefined],cbb513:["cbb180","cbb627",undefined],cbb627:["cbb513","cbb892",undefined],cbb892:["cbb627","cbb743",undefined],cbb743:["cbb892","cbb454",undefined],cbb454:["cbb743","cbb115",undefined],cbb115:["cbb454","cbb574",undefined],cbb574:["cbb115","cbb940",undefined],cbb940:["cbb574","cbb815
",undefined],cbb815:["cbb940","cbb237",undefined],cbb237:["cbb815","cbb593",undefined],cbb593:["cbb237","cbb693",undefined],cbb693:["cbb593","cbb778",undefined],cbb778:["cbb693","cbb819",undefined],cbb819:["cbb778","cbb734",undefined],cbb734:["cbb819","cbb941",undefined],cbb941:["cbb734","cbb63",undefined],cbb63:["cbb941","cbb286",undefined],cbb286:["cbb63","cbb889",undefined],cbb889:["cbb286","cbb461",undefined],cbb461:["cbb889","cbb42",undefined],cbb42:["cbb461","cbb551",undefined],cbb551:["cbb42","cbb260",undefined],cbb260:["cbb551","cbb518",undefined],cbb518:["cbb260","cbb18",undefined],cbb18:["cbb518","cbb904",undefined],cbb904:["cbb18","cbb964",undefined],cbb964:["cbb904","cbb961",undefined],cbb961:["cbb964","cbb383",undefined],cbb383:["cbb961","cbb58",undefined],cbb58:["cbb383","cbb205",undefined],cbb205:["cbb58","cbb835",undefined],cbb835:["cbb205","cbb570",undefined],cbb570:["cbb835","cbb930",undefined],cbb930:["cbb570","cbb46",undefined],cbb46:["cbb930","cbb946",undefined],cbb946:["cbb46","cbb657",undefined],cbb657:["cbb946","cbb248",undefined],cbb248:["cbb657","cbb114",undefined],cbb114:["cbb248","cbb653",undefined],cbb653:["cbb114","cbb909",undefined],cbb909:["cbb653","cbb190",undefined],cbb190:["cbb909","cbb891",undefined],cbb891:["cbb190","cbb475",undefined],cbb475:["cbb891","cbb90",undefined],cbb90:["cbb475","cbb546",undefined],cbb546:["cbb90","cbb975",undefined],cbb975:["cbb546","cbb794",undefined],cbb794:["cbb975","cbb920",undefined],cbb920:["cbb794","cbb465",undefined],cbb465:["cbb920","cbb5",undefined],cbb5:["cbb465","cbb96",undefined],cbb96:["cbb5","cbb388",undefined],cbb388:["cbb96","cbb867",undefined],cbb867:["cbb388","cbb558",undefined],cbb558:["cbb867","cbb169",undefined],cbb169:["cbb558","cbb933",undefined],cbb933:["cbb169","cbb577",undefined],cbb577:["cbb933","cbb776",undefined],cbb776:["cbb577","cbb25",undefined],cbb25:["cbb776","cbb343",undefined],cbb343:["cbb25","cbb77",undefined],cbb77:["cbb343","cbb646",undefined],cbb646:["cbb77","cbb5
02",undefined],cbb502:["cbb646","cbb410",undefined],cbb410:["cbb502","cbb979",undefined],cbb979:["cbb410","cbb787",undefined],cbb787:["cbb979","cbb972",undefined],cbb972:["cbb787","cbb877",undefined],cbb877:["cbb972","cbb101",undefined],cbb101:["cbb877","cbb645",undefined],cbb645:["cbb101","cbb142",undefined],cbb142:["cbb645","cbb654",undefined],cbb654:["cbb142","cbb160",undefined],cbb160:["cbb654","cbb384",undefined],cbb384:["cbb160","cbb923",undefined],cbb923:["cbb384","cbb466",undefined],cbb466:["cbb923","cbb302",undefined],cbb302:["cbb466","cbb195",undefined],cbb195:["cbb302","cbb21",undefined],cbb21:["cbb195","cbb689",undefined],cbb689:["cbb21","cbb39",undefined],cbb39:["cbb689","cbb191",undefined],cbb191:["cbb39","cbb875",undefined],cbb875:["cbb191","cbb158",undefined],cbb158:["cbb875","cbb798",undefined],cbb798:["cbb158","cbb194",undefined],cbb194:["cbb798","cbb792",undefined],cbb792:["cbb194","cbb539",undefined],cbb539:["cbb792","cbb623",undefined],cbb623:["cbb539","cbb530",undefined],cbb530:["cbb623","cbb799",undefined],cbb799:["cbb530","cbb925",undefined],cbb925:["cbb799","cbb542",undefined],cbb542:["cbb925","cbb804",undefined],cbb804:["cbb542","cbb806",undefined],cbb806:["cbb804","cbb272",undefined],cbb272:["cbb806","cbb868",undefined],cbb868:["cbb272","cbb701",undefined],cbb701:["cbb868","cbb852",undefined],cbb852:["cbb701","cbb663",undefined],cbb663:["cbb852","cbb252",undefined],cbb252:["cbb663","cbb187",undefined],cbb187:["cbb252","cbb876",undefined],cbb876:["cbb187","cbb326",undefined],cbb326:["cbb876","cbb730",undefined],cbb730:["cbb326","cbb912",undefined],cbb912:["cbb730","cbb801",undefined],cbb801:["cbb912","cbb28",undefined],cbb28:["cbb801","cbb368",undefined],cbb368:["cbb28","cbb511",undefined],cbb511:["cbb368","cbb488",undefined],cbb488:["cbb511","cbb276",undefined],cbb276:["cbb488","cbb750",undefined],cbb750:["cbb276","cbb120",undefined],cbb120:["cbb750","cbb324",undefined],cbb324:["cbb120","cbb189",undefined],cbb189:["cbb324","cbb650",undefin
ed],cbb650:["cbb189","cbb242",undefined],cbb242:["cbb650","cbb182",undefined],cbb182:["cbb242","cbb677",undefined],cbb677:["cbb182","cbb376",undefined],cbb376:["cbb677","cbb766",undefined],cbb766:["cbb376","cbb338",undefined],cbb338:["cbb766","cbb626",undefined],cbb626:["cbb338","cbb396",undefined],cbb396:["cbb626","cbb893",undefined],cbb893:["cbb396","cbb257",undefined],cbb257:["cbb893","cbb595",undefined],cbb595:["cbb257","cbb567",undefined],cbb567:["cbb595","cbb360",undefined],cbb360:["cbb567","cbb370",undefined],cbb370:["cbb360","cbb235",undefined],cbb235:["cbb370","cbb238",undefined],cbb238:["cbb235","cbb864",undefined],cbb864:["cbb238","cbb664",undefined],cbb664:["cbb864","cbb49",undefined],cbb49:["cbb664","cbb993",undefined],cbb993:["cbb49","cbb560",undefined],cbb560:["cbb993","cbb563",undefined],cbb563:["cbb560","cbb808",undefined],cbb808:["cbb563","cbb251",undefined],cbb251:["cbb808","cbb651",undefined],cbb651:["cbb251","cbb988",undefined],cbb988:["cbb651","cbb950",undefined],cbb950:["cbb988","cbb159",undefined],cbb159:["cbb950","cbb883",undefined],cbb883:["cbb159","cbb4",undefined],cbb4:["cbb883","cbb489",undefined],cbb489:["cbb4","cbb970",undefined],cbb970:["cbb489","cbb696",undefined],cbb696:["cbb970","cbb179",undefined],cbb179:["cbb696","cbb945",undefined],cbb945:["cbb179","cbb50",undefined],cbb50:["cbb945","cbb418",undefined],cbb418:["cbb50","cbb37",undefined],cbb37:["cbb418","cbb681",undefined],cbb681:["cbb37","cbb339",undefined],cbb339:["cbb681","cbb859",undefined],cbb859:["cbb339","cbb321",undefined],cbb321:["cbb859","cbb34",undefined],cbb34:["cbb321","cbb57",undefined],cbb57:["cbb34","cbb884",undefined],cbb884:["cbb57","cbb85",undefined],cbb85:["cbb884","cbb73",undefined],cbb73:["cbb85","cbb425",undefined],cbb425:["cbb73","cbb224",undefined],cbb224:["cbb425","cbb579",undefined],cbb579:["cbb224","cbb553",undefined],cbb553:["cbb579","cbb161",undefined],cbb161:["cbb553","cbb275",undefined],cbb275:["cbb161","cbb811",undefined],cbb811:["cbb275","cbb431"
,undefined],cbb431:["cbb811","cbb968",undefined],cbb968:["cbb431","cbb810",undefined],cbb810:["cbb968","cbb698",undefined],cbb698:["cbb810","cbb765",undefined],cbb765:["cbb698","cbb871",undefined],cbb871:["cbb765","cbb240",undefined],cbb240:["cbb871","cbb496",undefined],cbb496:["cbb240","cbb795",undefined],cbb795:["cbb496","cbb407",undefined],cbb407:["cbb795","cbb254",undefined],cbb254:["cbb407","cbb552",undefined],cbb552:["cbb254","cbb506",undefined],cbb506:["cbb552","cbb44",undefined],cbb44:["cbb506","cbb79",undefined],cbb79:["cbb44","cbb548",undefined],cbb548:["cbb79","cbb220",undefined],cbb220:["cbb548","cbb97",undefined],cbb97:["cbb220","cbb204",undefined],cbb204:["cbb97","cbb624",undefined],cbb624:["cbb204","cbb459",undefined],cbb459:["cbb624","cbb71",undefined],cbb71:["cbb459","cbb789",undefined],cbb789:["cbb71","cbb724",undefined],cbb724:["cbb789","cbb937",undefined],cbb937:["cbb724","cbb331",undefined],cbb331:["cbb937","cbb569",undefined],cbb569:["cbb331","cbb527",undefined],cbb527:["cbb569","cbb430",undefined],cbb430:["cbb527","cbb414",undefined],cbb414:["cbb430","cbb596",undefined],cbb596:["cbb414","cbb263",undefined],cbb263:["cbb596","cbb861",undefined],cbb861:["cbb263","cbb45",undefined],cbb45:["cbb861","cbb313",undefined],cbb313:["cbb45","cbb363",undefined],cbb363:["cbb313","cbb266",undefined],cbb266:["cbb363","cbb140",undefined],cbb140:["cbb266","cbb616",undefined],cbb616:["cbb140","cbb615",undefined],cbb615:["cbb616","cbb711",undefined],cbb711:["cbb615","cbb796",undefined],cbb796:["cbb711","cbb103",undefined],cbb103:["cbb796","cbb416",undefined],cbb416:["cbb103","cbb24",undefined],cbb24:["cbb416","cbb279",undefined],cbb279:["cbb24","cbb841",undefined],cbb841:["cbb279","cbb147",undefined],cbb147:["cbb841","cbb744",undefined],cbb744:["cbb147","cbb455",undefined],cbb455:["cbb744","cbb522",undefined],cbb522:["cbb455","cbb996",undefined],cbb996:["cbb522","cbb56",undefined],cbb56:["cbb996","cbb571",undefined],cbb571:["cbb56","cbb619",undefined],cbb619:["cb
b571","cbb408",undefined],cbb408:["cbb619","cbb536",undefined],cbb536:["cbb408","cbb262",undefined],cbb262:["cbb536","cbb722",undefined],cbb722:["cbb262","cbb105",undefined],cbb105:["cbb722","cbb471",undefined],cbb471:["cbb105","cbb217",undefined],cbb217:["cbb471","cbb270",undefined],cbb270:["cbb217","cbb84",undefined],cbb84:["cbb270","cbb281",undefined],cbb281:["cbb84","cbb20",undefined],cbb20:["cbb281","cbb572",undefined],cbb572:["cbb20","cbb243",undefined],cbb243:["cbb572","cbb198",undefined],cbb198:["cbb243","cbb528",undefined],cbb528:["cbb198","cbb934",undefined],cbb934:["cbb528","cbb604",undefined],cbb604:["cbb934","cbb919",undefined],cbb919:["cbb604","cbb735",undefined],cbb735:["cbb919","cbb253",undefined],cbb253:["cbb735","cbb347",undefined],cbb347:["cbb253","cbb470",undefined],cbb470:["cbb347","cbb351",undefined],cbb351:["cbb470","cbb94",undefined],cbb94:["cbb351","cbb380",undefined],cbb380:["cbb94","cbb303",undefined],cbb303:["cbb380","cbb900",undefined],cbb900:["cbb303","cbb267",undefined],cbb267:["cbb900","cbb956",undefined],cbb956:["cbb267","cbb842",undefined],cbb842:["cbb956","cbb226",undefined],cbb226:["cbb842","cbb395",undefined],cbb395:["cbb226","cbb23",undefined],cbb23:["cbb395","cbb192",undefined],cbb192:["cbb23","cbb700",undefined],cbb700:["cbb192","cbb7",undefined],cbb7:["cbb700","cbb290",undefined],cbb290:["cbb7","cbb999",undefined],cbb999:["cbb290","cbb620",undefined],cbb620:["cbb999","cbb639",undefined],cbb639:["cbb620","cbb769",undefined],cbb769:["cbb639","cbb723",undefined],cbb723:["cbb769","cbb913",undefined],cbb913:["cbb723","cbb803",undefined],cbb803:["cbb913","cbb188",undefined],cbb188:["cbb803","cbb668",undefined],cbb668:["cbb188","cbb896",undefined],cbb896:["cbb668","cbb532",undefined],cbb532:["cbb896","cbb707",undefined],cbb707:["cbb532","cbb54",undefined],cbb54:["cbb707","cbb603",undefined],cbb603:["cbb54","cbb898",undefined],cbb898:["cbb603","cbb918",undefined],cbb918:["cbb898","cbb857",undefined],cbb857:["cbb918","cbb862",undefine
d],cbb862:["cbb857","cbb51",undefined],cbb51:["cbb862","cbb516",undefined],cbb516:["cbb51","cbb170",undefined],cbb170:["cbb516","cbb768",undefined],cbb768:["cbb170","cbb271",undefined],cbb271:["cbb768","cbb282",undefined],cbb282:["cbb271","cbb498",undefined],cbb498:["cbb282","cbb858",undefined],cbb858:["cbb498","cbb60",undefined],cbb60:["cbb858","cbb409",undefined],cbb409:["cbb60","cbb767",undefined],cbb767:["cbb409","cbb622",undefined],cbb622:["cbb767","cbb106",undefined],cbb106:["cbb622","cbb145",undefined],cbb145:["cbb106","cbb628",undefined],cbb628:["cbb145","cbb959",undefined],cbb959:["cbb628","cbb807",undefined],cbb807:["cbb959","cbb865",undefined],cbb865:["cbb807","cbb613",undefined],cbb613:["cbb865","cbb780",undefined],cbb780:["cbb613","cbb860",undefined],cbb860:["cbb780","cbb124",undefined],cbb124:["cbb860","cbb501",undefined],cbb501:["cbb124","cbb587",undefined],cbb587:["cbb501","cbb314",undefined],cbb314:["cbb587","cbb131",undefined],cbb131:["cbb314","cbb523",undefined],cbb523:["cbb131","cbb976",undefined],cbb976:["cbb523","cbb635",undefined],cbb635:["cbb976","cbb400",undefined],cbb400:["cbb635","cbb880",undefined],cbb880:["cbb400","cbb902",undefined],cbb902:["cbb880","cbb726",undefined],cbb726:["cbb902","cbb91",undefined],cbb91:["cbb726","cbb26",undefined],cbb26:["cbb91","cbb378",undefined],cbb378:["cbb26","cbb33",undefined],cbb33:["cbb378","cbb713",undefined],cbb713:["cbb33","cbb88",undefined],cbb88:["cbb713","cbb213",undefined],cbb213:["cbb88","cbb991",undefined],cbb991:["cbb213","cbb669",undefined],cbb669:["cbb991","cbb594",undefined],cbb594:["cbb669","cbb82",undefined],cbb82:["cbb594","cbb86",undefined],cbb86:["cbb82","cbb150",undefined],cbb150:["cbb86","cbb219",undefined],cbb219:["cbb150","cbb382",undefined],cbb382:["cbb219","cbb36",undefined],cbb36:["cbb382","cbb966",undefined],cbb966:["cbb36","cbb659",undefined],cbb659:["cbb966","cbb879",undefined],cbb879:["cbb659","cbb9",undefined],cbb9:["cbb879","cbb43",undefined],cbb43:["cbb9","cbb823",undefine
d],cbb823:["cbb43","cbb402",undefined],cbb402:["cbb823","cbb881",undefined],cbb881:["cbb402","cbb704",undefined],cbb704:["cbb881","cbb227",undefined],cbb227:["cbb704","cbb977",undefined],cbb977:["cbb227","cbb575",undefined],cbb575:["cbb977","cbb55",undefined],cbb55:["cbb575","cbb379",undefined],cbb379:["cbb55","cbb375",undefined],cbb375:["cbb379","cbb132",undefined],cbb132:["cbb375","cbb443",undefined],cbb443:["cbb132","cbb543",undefined],cbb543:["cbb443","cbb247",undefined],cbb247:["cbb543","cbb481",undefined],cbb481:["cbb247","cbb27",undefined],cbb27:["cbb481","cbb323",undefined],cbb323:["cbb27","cbb285",undefined],cbb285:["cbb323","cbb678",undefined],cbb678:["cbb285","cbb332",undefined],cbb332:["cbb678","cbb825",undefined],cbb825:["cbb332","cbb186",undefined],cbb186:["cbb825","cbb445",undefined],cbb445:["cbb186","cbb656",undefined],cbb656:["cbb445","cbb413",undefined],cbb413:["cbb656","cbb600",undefined],cbb600:["cbb413","cbb385",undefined],cbb385:["cbb600","cbb717",undefined],cbb717:["cbb385","cbb304",undefined],cbb304:["cbb717","cbb915",undefined],cbb915:["cbb304","cbb298",undefined],cbb298:["cbb915","cbb312",undefined],cbb312:["cbb298","cbb123",undefined],cbb123:["cbb312","cbb346",undefined],cbb346:["cbb123","cbb294",undefined],cbb294:["cbb346","cbb955",undefined],cbb955:["cbb294","cbb152",undefined],cbb152:["cbb955","cbb662",undefined],cbb662:["cbb152","cbb386",undefined],cbb386:["cbb662","cbb484",undefined],cbb484:["cbb386","cbb762",undefined],cbb762:["cbb484","cbb113",undefined],cbb113:["cbb762","cbb442",undefined],cbb442:["cbb113","cbb519",undefined],cbb519:["cbb442","cbb844",undefined],cbb844:["cbb519","cbb697",undefined],cbb697:["cbb844","cbb690",undefined],cbb690:["cbb697","cbb911",undefined],cbb911:["cbb690","cbb67",undefined],cbb67:["cbb911","cbb866",undefined],cbb866:["cbb67","cbb500",undefined],cbb500:["cbb866","cbb165",undefined],cbb165:["cbb500","cbb207",undefined],cbb207:["cbb165","cbb821",undefined],cbb821:["cbb207","cbb741",undefined],cbb741:["
cbb821","cbb833",undefined],cbb833:["cbb741","cbb196",undefined],cbb196:["cbb833","cbb319",undefined],cbb319:["cbb196","cbb390",undefined],cbb390:["cbb319","cbb474",undefined],cbb474:["cbb390","cbb670",undefined],cbb670:["cbb474","cbb420",undefined],cbb420:["cbb670","cbb74",undefined],cbb74:["cbb420","cbb774",undefined],cbb774:["cbb74","cbb175",undefined],cbb175:["cbb774","cbb846",undefined],cbb846:["cbb175","cbb62",undefined],cbb62:["cbb846","cbb14",undefined],cbb14:["cbb62","cbb675",undefined],cbb675:["cbb14","cbb926",undefined],cbb926:["cbb675","cbb287",undefined],cbb287:["cbb926","cbb137",undefined],cbb137:["cbb287","cbb353",undefined],cbb353:["cbb137","cbb818",undefined],cbb818:["cbb353","cbb53",undefined],cbb53:["cbb818","cbb362",undefined],cbb362:["cbb53","cbb345",undefined],cbb345:["cbb362","cbb306",undefined],cbb306:["cbb345","cbb232",undefined],cbb232:["cbb306","cbb328",undefined],cbb328:["cbb232","cbb580",undefined],cbb580:["cbb328","cbb65",undefined],cbb65:["cbb580","cbb493",undefined],cbb493:["cbb65","cbb139",undefined],cbb139:["cbb493","cbb831",undefined],cbb831:["cbb139","cbb684",undefined],cbb684:["cbb831","cbb325",undefined],cbb325:["cbb684","cbb261",undefined],cbb261:["cbb325","cbb239",undefined],cbb239:["cbb261","cbb78",undefined],cbb78:["cbb239","cbb269",undefined],cbb269:["cbb78","cbb784",undefined],cbb784:["cbb269","cbb405",undefined],cbb405:["cbb784","cbb754",undefined],cbb754:["cbb405","cbb667",undefined],cbb667:["cbb754","cbb318",undefined],cbb318:["cbb667","cbb93",undefined],cbb93:["cbb318","cbb820",undefined],cbb820:["cbb93","cbb742",undefined],cbb742:["cbb820","cbb229",undefined],cbb229:["cbb742","cbb394",undefined],cbb394:["cbb229","cbb661",undefined],cbb661:["cbb394","cbb777",undefined],cbb777:["cbb661","cbb614",undefined],cbb614:["cbb777","cbb982",undefined],cbb982:["cbb614","cbb719",undefined],cbb719:["cbb982","cbb280",undefined],cbb280:["cbb719","cbb632",undefined],cbb632:["cbb280","cbb608",undefined],cbb608:["cbb632","cbb424",undefi
ned],cbb424:["cbb608","cbb797",undefined],cbb797:["cbb424","cbb759",undefined],cbb759:["cbb797","cbb802",undefined],cbb802:["cbb759","cbb148",undefined],cbb148:["cbb802","cbb671",undefined],cbb671:["cbb148","cbb210",undefined],cbb210:["cbb671","cbb372",undefined],cbb372:["cbb210","cbb585",undefined],cbb585:["cbb372","cbb597",undefined],cbb597:["cbb585","cbb392",undefined],cbb392:["cbb597","cbb905",undefined],cbb905:["cbb392","cbb541",undefined],cbb541:["cbb905","cbb837",undefined],cbb837:["cbb541","cbb274",undefined],cbb274:["cbb837","cbb832",undefined],cbb832:["cbb274","cbb809",undefined],cbb809:["cbb832","cbb333",undefined],cbb333:["cbb809","cbb19",undefined],cbb19:["cbb333","cbb509",undefined],cbb509:["cbb19","cbb164",undefined],cbb164:["cbb509","cbb128",undefined],cbb128:["cbb164","cbb330",undefined],cbb330:["cbb128","cbb686",undefined],cbb686:["cbb330","cbb221",undefined],cbb221:["cbb686","cbb714",undefined],cbb714:["cbb221","cbb177",undefined],cbb177:["cbb714","cbb278",undefined],cbb278:["cbb177","cbb428",undefined],cbb428:["cbb278","cbb561",undefined],cbb561:["cbb428","cbb377",undefined],cbb377:["cbb561","cbb422",undefined],cbb422:["cbb377","cbb201",undefined],cbb201:["cbb422","cbb456",undefined],cbb456:["cbb201","cbb834",undefined],cbb834:["cbb456","cbb153",undefined],cbb153:["cbb834","cbb473",undefined],cbb473:["cbb153","cbb581",undefined],cbb581:["cbb473","cbb564",undefined],cbb564:["cbb581","cbb222",undefined],cbb222:["cbb564","cbb680",undefined],cbb680:["cbb222","cbb487",undefined],cbb487:["cbb680","cbb309",undefined],cbb309:["cbb487","cbb712",undefined],cbb712:["cbb309","cbb483",undefined],cbb483:["cbb712","cbb397",undefined],cbb397:["cbb483","cbb419",undefined],cbb419:["cbb397","cbb636",undefined],cbb636:["cbb419","cbb978",undefined],cbb978:["cbb636","cbb111",undefined],cbb111:["cbb978","cbb973",undefined],cbb973:["cbb111","cbb334",undefined],cbb334:["cbb973","cbb174",undefined],cbb174:["cbb334","cbb249",undefined],cbb249:["cbb174","cbb81",undefined],c
bb81:["cbb249","cbb987",undefined],cbb987:["cbb81","cbb83",undefined],cbb83:["cbb987","cbb901",undefined],cbb901:["cbb83","cbb288",undefined],cbb288:["cbb901","cbb816",undefined],cbb816:["cbb288","cbb550",undefined],cbb550:["cbb816","cbb366",undefined],cbb366:["cbb550","cbb559",undefined],cbb559:["cbb366","cbb479",undefined],cbb479:["cbb559","cbb948",undefined],cbb948:["cbb479","cbb95",undefined],cbb95:["cbb948","cbb491",undefined],cbb491:["cbb95","cbb733",undefined],cbb733:["cbb491","cbb13",undefined],cbb13:["cbb733","cbb544",undefined],cbb544:["cbb13","cbb193",undefined],cbb193:["cbb544","cbb732",undefined],cbb732:["cbb193","cbb812",undefined],cbb812:["cbb732","cbb586",undefined],cbb586:["cbb812","cbb748",undefined],cbb748:["cbb586","cbb154",undefined],cbb154:["cbb748","cbb337",undefined],cbb337:["cbb154","cbb674",undefined],cbb674:["cbb337","cbb952",undefined],cbb952:["cbb674","cbb146",undefined],cbb146:["cbb952","cbb568",undefined],cbb568:["cbb146","cbb715",undefined],cbb715:["cbb568","cbb963",undefined],cbb963:["cbb715","cbb800",undefined],cbb800:["cbb963","cbb949",undefined],cbb949:["cbb800","cbb417",undefined],cbb417:["cbb949","cbb468",undefined],cbb468:["cbb417","cbb401",undefined],cbb401:["cbb468","cbb423",undefined],cbb423:["cbb401","cbb476",undefined],cbb476:["cbb423","cbb435",undefined],cbb435:["cbb476","cbb2",undefined],cbb2:["cbb435","cbb364",undefined],cbb364:["cbb2","cbb277",undefined],cbb277:["cbb364","cbb255",undefined],cbb255:["cbb277","cbb389",undefined],cbb389:["cbb255","cbb617",undefined],cbb617:["cbb389","cbb728",undefined],cbb728:["cbb617","cbb843",undefined],cbb843:["cbb728","cbb721",undefined],cbb721:["cbb843","cbb863",undefined],cbb863:["cbb721","cbb679",undefined],cbb679:["cbb863","cbb890",undefined],cbb890:["cbb679","cbb611",undefined],cbb611:["cbb890","cbb437",undefined],cbb437:["cbb611","cbb607",undefined],cbb607:["cbb437","cbb716",undefined],cbb716:["cbb607","cbb230",undefined],cbb230:["cbb716","cbb246",undefined],cbb246:["cbb230","cb
b183",undefined],cbb183:["cbb246","cbb826",undefined],cbb826:["cbb183","cbb757",undefined],cbb757:["cbb826","cbb699",undefined],cbb699:["cbb757","cbb584",undefined],cbb584:["cbb699","cbb206",undefined],cbb206:["cbb584","cbb885",undefined],cbb885:["cbb206","cbb727",undefined],cbb727:["cbb885","cbb984",undefined],cbb984:["cbb727","cbb836",undefined],cbb836:["cbb984","cbb758",undefined],cbb758:["cbb836","cbb848",undefined],cbb848:["cbb758","cbb782",undefined],cbb782:["cbb848","cbb682",undefined],cbb682:["cbb782","cbb658",undefined],cbb658:["cbb682","cbb117",undefined],cbb117:["cbb658","cbb215",undefined],cbb215:["cbb117","cbb31",undefined],cbb31:["cbb215","cbb601",undefined],cbb601:["cbb31","cbb32",undefined],cbb32:["cbb601","cbb749",undefined],cbb749:["cbb32","cbb625",undefined],cbb625:["cbb749","cbb510",undefined],cbb510:["cbb625","cbb822",undefined],cbb822:["cbb510","cbb218",undefined],cbb218:["cbb822","cbb618",undefined],cbb618:["cbb218","cbb573",undefined],cbb573:["cbb618","cbb477",undefined],cbb477:["cbb573","cbb464",undefined],cbb464:["cbb477","cbb427",undefined],cbb427:["cbb464","cbb914",undefined],cbb914:["cbb427","cbb562",undefined],cbb562:["cbb914","cbb439",undefined],cbb439:["cbb562","cbb329",undefined],cbb329:["cbb439","cbb296",undefined],cbb296:["cbb329","cbb482",undefined],cbb482:["cbb296","cbb957",undefined],cbb957:["cbb482","cbb526",undefined],cbb526:["cbb957","cbb605",undefined],cbb605:["cbb526","cbb17",undefined],cbb17:["cbb605","cbb214",undefined],cbb214:["cbb17","cbb783",undefined],cbb783:["cbb214","cbb927",undefined],cbb927:["cbb783","cbb873",undefined],cbb873:["cbb927","cbb894",undefined],cbb894:["cbb873","cbb736",undefined],cbb736:["cbb894","cbb655",undefined],cbb655:["cbb736","cbb953",undefined],cbb953:["cbb655","cbb989",undefined],cbb989:["cbb953","cbb426",undefined],cbb426:["cbb989","cbb998",undefined],cbb998:["cbb426","cbb703",undefined],cbb703:["cbb998","cbb144",undefined],cbb144:["cbb703","cbb371",undefined],cbb371:["cbb144","cbb763",undef
ined],cbb763:["cbb371","cbb412",undefined],cbb412:["cbb763","cbb467",undefined],cbb467:["cbb412","cbb753",undefined],cbb753:["cbb467","cbb341",undefined],cbb341:["cbb753","cbb928",undefined],cbb928:["cbb341","cbb643",undefined],cbb643:["cbb928","cbb986",undefined],cbb986:["cbb643","cbb710",undefined],cbb710:["cbb986","cbb460",undefined],cbb460:["cbb710","cbb850",undefined],cbb850:["cbb460","cbb688",undefined],cbb688:["cbb850","cbb944",undefined],cbb944:["cbb688","cbb291",undefined],cbb291:["cbb944","cbb162",undefined],cbb162:["cbb291","cbb746",undefined],cbb746:["cbb162","cbb307",undefined],cbb307:["cbb746","cbb497",undefined],cbb497:["cbb307","cbb462",undefined],cbb462:["cbb497","cbb119",undefined],cbb119:["cbb462","cbb908",undefined],cbb908:["cbb119","cbb100",undefined],cbb100:["cbb908","cbb438",undefined],cbb438:["cbb100","cbb163",undefined],cbb163:["cbb438","cbb265",undefined],cbb265:["cbb163","cbb903",undefined],cbb903:["cbb265","cbb786",undefined],cbb786:["cbb903","cbb233",undefined],cbb233:["cbb786","cbb694",undefined],cbb694:["cbb233","cbb411",undefined],cbb411:["cbb694","cbb381",undefined],cbb381:["cbb411","cbb641",undefined],cbb641:["cbb381","cbb41",undefined],cbb41:["cbb641","cbb540",undefined],cbb540:["cbb41","cbb785",undefined],cbb785:["cbb540","cbb256",undefined],cbb256:["cbb785","cbb756",undefined],cbb756:["cbb256","cbb992",undefined],cbb992:["cbb756","cbb907",undefined],cbb907:["cbb992","cbb156",undefined],cbb156:["cbb907","cbb740",undefined],cbb740:["cbb156","cbb598",undefined],cbb598:["cbb740","cbb828",undefined],cbb828:["cbb598","cbb589",undefined],cbb589:["cbb828","cbb125",undefined],cbb125:["cbb589","cbb720",undefined],cbb720:["cbb125","cbb348",undefined],cbb348:["cbb720","cbb827",undefined],cbb827:["cbb348","cbb352",undefined],cbb352:["cbb827","cbb640",undefined],cbb640:["cbb352","cbb447",undefined],cbb447:["cbb640","cbb359",undefined],cbb359:["cbb447","cbb110",undefined],cbb110:["cbb359","cbb729",undefined],cbb729:["cbb110","cbb75",undefined],
cbb75:["cbb729","cbb486",undefined],cbb486:["cbb75","cbb292",undefined],cbb292:["cbb486","cbb514",undefined],cbb514:["cbb292","cbb250",undefined],cbb250:["cbb514","cbb839",undefined],cbb839:["cbb250","cbb15",undefined],cbb15:["cbb839","cbb980",undefined],cbb980:["cbb15","cbb878",undefined],cbb878:["cbb980","cbb369",undefined],cbb369:["cbb878","cbb184",undefined],cbb184:["cbb369","cbb886",undefined],cbb886:["cbb184","cbb104",undefined],cbb104:["cbb886","cbb602",undefined],cbb602:["cbb104","cbb16",undefined],cbb16:["cbb602","cbb936",undefined],cbb936:["cbb16","cbb209",undefined],cbb209:["cbb936","cbb921",undefined],cbb921:["cbb209","cbb225",undefined],cbb225:["cbb921","cbb583",undefined],cbb583:["cbb225","cbb854",undefined],cbb854:["cbb583","cbb344",undefined],cbb344:["cbb854","cbb342",undefined],cbb342:["cbb344","cbb906",undefined],cbb906:["cbb342","cbb924",undefined],cbb924:["cbb906","cbb752",undefined],cbb752:["cbb924","cbb449",undefined],cbb449:["cbb752","cbb320",undefined],cbb320:["cbb449","cbb310",undefined],cbb310:["cbb320","cbb446",undefined],cbb446:["cbb310","cbb814",undefined],cbb814:["cbb446","cbb830",undefined],cbb830:["cbb814","cbb942",undefined],cbb942:["cbb830","cbb515",undefined],cbb515:["cbb942","cbb895",undefined],cbb895:["cbb515","cbb521",undefined],cbb521:["cbb895","cbb899",undefined],cbb899:["cbb521","cbb549",undefined],cbb549:["cbb899","cbb216",undefined],cbb216:["cbb549","cbb691",undefined],cbb691:["cbb216","cbb947",undefined],cbb947:["cbb691","cbb3",undefined],cbb3:["cbb947","cbb525",undefined],cbb525:["cbb3","cbb136",undefined],cbb136:["cbb525","cbb116",undefined],cbb116:["cbb136","cbb824",undefined],cbb824:["cbb116","cbb971",undefined],cbb971:["cbb824","cbb350",undefined],cbb350:["cbb971","cbb72",undefined],cbb72:["cbb350","cbb917",undefined],cbb917:["cbb72","cbb458",undefined],cbb458:["cbb917","cbb406",undefined],cbb406:["cbb458","cbb692",undefined],cbb692:["cbb406","cbb30",undefined],cbb30:["cbb692","cbb421",undefined],cbb421:["cbb30","cbb6
73",undefined],cbb673:["cbb421","cbb463",undefined],cbb463:["cbb673","cbb322",undefined],cbb322:["cbb463","cbb68",undefined],cbb68:["cbb322","cbb393",undefined],cbb393:["cbb68","cbb517",undefined],cbb517:["cbb393","cbb295",undefined],cbb295:["cbb517","cbb308",undefined],cbb308:["cbb295","cbb66",undefined],cbb66:["cbb308","cbb817",undefined],cbb817:["cbb66","cbb87",undefined],cbb87:["cbb817","cbb453",undefined],cbb453:["cbb87","cbb929",undefined],cbb929:["cbb453","cbb633",undefined],cbb633:["cbb929","cbb922",undefined],cbb922:["cbb633","cbb472",undefined],cbb472:["cbb922","cbb731",undefined],cbb731:["cbb472","cbb535",undefined],cbb535:["cbb731","cbb297",undefined],cbb297:["cbb535","cbb644",undefined],cbb644:["cbb297","cbb958",undefined],cbb958:["cbb644","cbb29",undefined],cbb29:["cbb958","cbb485",undefined],cbb485:["cbb29","cbb791",undefined],cbb791:["cbb485","cbb173",undefined],cbb173:["cbb791","cbb354",undefined],cbb354:["cbb173","cbb138",undefined],cbb138:["cbb354","cbb512",undefined],cbb512:["cbb138","cbb273",undefined],cbb273:["cbb512","cbb745",undefined],cbb745:["cbb273","cbb300",undefined],cbb300:["cbb745","cbb365",undefined],cbb365:["cbb300","cbb943",undefined],cbb943:["cbb365","cbb582",undefined],cbb582:["cbb943","cbb520",undefined],cbb520:["cbb582","cbb609",undefined],cbb609:["cbb520","cbb951",undefined],cbb951:["cbb609","cbb761",undefined],cbb761:["cbb951","cbb995",undefined],cbb995:["cbb761","cbb133",undefined],cbb133:["cbb995","cbb245",undefined],cbb245:["cbb133","",undefined]};
/* End of the cbb-state lookup table started earlier in the file: each key maps a
   state name to [previousState, nextState, undefined]; the last entry (cbb245)
   has "" as its next state, i.e. the end of the chain. The three fields below
   belong to the enclosing constructor (opened before this chunk): s is the
   initial state name, CFf and num are counters reset to 0. */
this.s="cbb1";this.CFf=0;this.num=0;}
/* cltothis(e, c, i): shallow-copies every enumerable key of c onto e; when
   i == 1 it instead writes undefined for each key (clearing a variable pool).
   Minified twin of the readable cltothis(cythis, poolList, off) defined earlier
   in this file. */
function cltothis(e,c,i){for(let b in c){i==1?e[b]=undefined:e[b]=c[b];}}
/* Keep a reference to the current window.fetch under window._fetch —
   presumably so the original is still reachable after the VM hooks fetch;
   TODO(review): confirm where _fetch is consumed. */
window._fetch=window.fetch;
/* cbb_jsvmp(i, e, c, n, l, b, f): minified form of the bytecode-VM dispatcher
   cbb_jsvmp(all, duei, start, shuz, argsList, ogg, op) shown at the top of the
   file; the definition continues past this chunk. The nested o(b, e, c) mirrors
   getproto: it walks b's prototype chain looking for an own property e and
   assigns c on the owning object; if assignment throws it falls back to
   this[e], and if the chain is exhausted it writes window[e]. The sprinkled
   `debugger` statements look like anti-debugging/packer residue — they have no
   effect on program state (aside from the stray P=9 write). */
function cbb_jsvmp(i,e,c,n,l,b,f){function o(b,e,c){let i=b;for(;1==1;){if(b.hasOwnProperty(e)){debugger;try{b[e]=c;}catch(b){this[e]=c;return;}break;}else{debugger;b=b.__proto__;if(b==undefined||b==null){debugger;window[e]=c;return;}else{debugger;P=9;}}}}if(f!==undefined){debugger;var d;d=f["allthis"];e=f.duei;i=f.all;n=f.shuz;l=f.argsList;var 
s,t,u,a,r,h,_,v,P,g,p,w;_=f.a7;var z=f.args;var m=f.cbbb;}else{debugger;var d;b!==undefined?d=b:d=i;var s,t,u,a,r,h,_,v,P,g,p,w;var z=[];var m=i;}var y=[1,1,1,1,5,1,1,1,1,1,1,1,1,1,5,1,5,1,4,4,7,1,1,13,5,9,7,12,13,1,1,1,12,9,11,1,11,1,6,1,1,6,1,1,1,1,1,1,6,12,1,1,6,1,1,1,1,12,1,1,6,14,1,1,10,1,1,9,12,1,7,1,8,1,1,1,6,1,3,1,1,1,1,1,1,12,1,1,10,1,11,1,1,1,1,1,1,8,1,8,1,1,1,1,10,1,12,1,1,1,11,1,1,1,1,1,1,1,1,1,5,1,10,1,1,1,1,1,1,1,4,1,1,1,1,4,12,1,1,1,1,1,11,1,1,1,1,11,1,13,1,13,1,7,1,1,1,4,1,11,7,1,1,12,1,1,1,1,1,1,1,1,1,1,1,10,1,1,4,1,11,13,1,1,7,10,1,12,1,11,10,9,1,12,1,4,1,1,1,1,8,1,1,6,1,1,1,10,1,1,10,1,1,1,1,1,13,5,1,13,12,1,1,1,9,1,4,1,1,1,8,5,1,6,1,12,1,1,1,1,1,1,10,1,1,1,1,1,1,1,5,1,1,9,1,1,1,11,1,1,1,4,1,1,1,1,1,1,1,1,1,6,13,5,1,11,1,1,1,1,1,10,7,1,1,1,1,14,1,1,1,10,4,1,1,1,1,7,3,1,1,1,6,1,1,1,1,7,1,1,1,1,1,1,4,1,1,1,1,1,1,1,1,1,11,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,11,1,5,1,4,5,1,1,1,1,1,1,1,10,11,6,9,1,12,1,1,5,7,5,5,12,1,9,1,13,1,7,6,1,1,8,1,1,1,3,10,1,12,1,1,1,1,3,1,1,4,1,5,1,1,13,1,10,1,13,1,1,1,1,1,1,1,12,4,9,1,1,1,1,1,4,1,1,10,1,1,12,1,1,1,4,6,1,1,13,11,13,10,1,1,1,1,1,1,7,1,9,1,8,6,4,1,5,1,1,1,1,9,1,1,8,1,1,12,13,3,1,1,12,1,4,1,13,13,7,1,1,1,1,1,1,4,1,3,1,1,1,5,1,1,1,11,1,1,1,1,13,13,1,1,1,1,1,9,1,1,5,1,1,5,1,12,1,1,11,4,13,12,1,1,1,8,11,1,1,12,1,1,1,10,9,13,6,1,1,1,1,1,9,1,8,11,10,1,1,5,1,1,1,7,5,1,1,1,5,1,1,1,13,1,1,7,1,1,1,4,1,1,1,11,1,1,1,1,1,13,1,13,1,1,1,1,1,1,1,5,6,9,5,1,4,1,1,1,1,1,1,7,1,1,1,1,1,1,1,1,1,1,10,4,7,6,1,1,1,1,1,1,1,6,1,1,7,7,1,1,1,5,1,1,1,7,6,1,4,1,1,10,9,1,1,1,1,7,1,10,1,1,1,1,10,11,1,1,1,1,7,1,1,1,9,1,5,1,1,1,1,9,12,1,11,9,4,9,1,5,1,11,1,1,1,1,1,1,1,1,1,1,1,1,1,11,12,1,1,12,1,7,1,1,1,10,1,11,1,11,1,1,10,1,1,1,13,1,1,1,1,1,1,1,1,1,8,1,1,8,8,1,1,11,9,1,1,5,10,1,1,1,1,3,5,12,1,1,12,1,1,1,1,1,9,11,1,1,1,6,10,9,1,1,6,6,1,1,4,1,1,1,11,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,6,1,1,4,1,1,1,1,6,12,6,10,1,1,9,1,1,4,1,12,1,1,1,1,11,1,6,1,1,1,12,6,1,3,1,9,6,11,1,7,5,1,1,1,1,1,4,9,6,1,4,1,5,1,1,10,1,1,1,6,12,1,1,1,1,1,12,1,1,1,1,9,1,1,1,1,1,
1,1,1,1,1,1,1,4,6,1,5,1,11,5,1,1,7,1,9,1,1,5,1,5,1,1,1,12,1,1,1,1,1,7,1,1,4,1,6,8,1,1,9,1,1,4,9,1,12,1,13,1,1,1,1,1,4,1,1,1,1,4,1,1,1,1,9,12,1,1,1,5,1,5,1,4,1,1,1,5,7,1,1,3,1,1,1,1,1,1,1,1,1,1,1,11,1,6,1,11,1,1,1,10,1,1,1,1,1,1,1,1,5,1,1,10,1,1,1,6,1,1,6,9,1,6,13,5,1,1,1,1,12,1,1,1,1,1,1,1,1,10,1,9,1,1,1,12,10,9,6,1,1,10,9,4,1,1,9,6,7,1,9,1,1,1,7,6,1,1,1,8,1,1,1,10,1,1,1,12,1,1,12,1,10,1,1,1,1,5,1,13,12,1,11,10,1,1,1,1,1,6,11,1,1,1,1,13,1,1,1,10,1,1,11,7,8,1,1,1,1,12,1,1,1,1,1,5,1,7,13,6,1,1,11,1,1,6,7,1,4,14,1,12,1,7,1,1,1,1,1,4,9,1,1,1,1,12,8,1,1,1,1,5,8,1,1,1,1,10,1,8,1,1,1,7,1,1,12,8,9,1,1,1,1,1,1,10,1,1,1,1,8,1,4,5,5,12,11,1,1,1,7,9,1,1,1,1,13,1,8,1,1,1,8,1,1,1,1,1,1,1,9,11,1,1,1,1,1,1,1,11,1,1,1,1,4,1,1,1,4,10,1,6,6,1,1,1,1,5,8,7,1,13,1,9,1,1,1,1,1,8,1,5,8,1,1,1,6,1,1,1,1,7,13,9,4,1,1,9,1,1,12,12,10,1,13,1,1,1,1,1,11,1,1,1,1,1,1,1,1,5,1,4,7,1,1,1,1,1,13,1,6,1,12,1,1,1,1,5,1,1,1,1,1,9,7,1,13,1,1,11,1,13,1,6,11,1,1,1,1,1,5,1,4,1,12,4,1,1,1,1,1,1,1,1,1,1,1,1,6,1,1,1,7,3,1,1,1,1,7,1,1,1,6,1,13,1,8,5,1,1,13,1,1,1,9,1,1,8,1,9,13,1,1,1,1,1,13,6,11,7,6,1,11,9,1,1,1,1,1,1,1,1,1,1,1,8,1,1,1,1,7,1,11,12,1,12,1,1,6,1,7,1,1,10,1,1,4,5,1,1,13,1,1,1,9,1,1,9,1,8,7,5,1,1,1,1,1,1,1,1,1,1,1,1,1,6,1,1,4,1,13,1,1,1,9,1,10,10,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,10,9,1,1,9,1,1,13,1,1,1,1,1,6,7,1,1,4,6,1,1,1,1,4,1,1,1,9,5,6,1,1,10,1,13,1,1,1,1,11,6,1,1,13,1,1,1,1,12,1,1,1,1,1,12,1,1,1,5,11,1,1,1,1,11,1,6,10,1,1,1,4,1,1,1,1,1,12,4,1,1,2,1,7,1,1,4,1,1,7,1,1,1,8,1,4,1,1,1,1,1,1,1,1,1,1,1,1,5,1,1,5,1,12,8,1,12,11,1,1,1,1,7,1,1,1,1,1,8,1,1,1,1,1,1,1,1,10,10,12,1,7,1,1,1,8,12,11,5,1,4,1,1,1,4,1,8,1,1,8,1,11,13,1,1,1,1,1,8,1,8,1,12,1,1,8,1,1,1,1,1,9,1,1,8,1,1,6,10,1,8,9,7,13,1,13,1,9,1,10,1,1,5,1,7,1,1,1,4,1,7,1,1,1,6,1,5,11,1,1,1,1,1,1,5,6,5,1,1,1,1,1,1,11,1,13,1,1,1,9,7,1,1,10,9,1,6,1,11,1,1,12,1,7,9,12,1,1,1,1,1,1,13,4,1,1,10,1,1,4,13,1,4,13,11,9,1,1,1,1,1,8,11,10,3,1,1,1,5,8,11,1,8,1,1,1,1,1,3,6,1,1,1,6,1,1,1,1,6,1,1,1,1,12,1,8,1,6,1,1,9,10,1,9,11,1,4,1,3,1,1,1,1,1,13,13,1,1,1,1,1,11,1,1,1,1,
1,1,1,4,1,1,8,10,1,1,1,10,1,1,1,7,4,1,1,1,1,10,1,1,10,1,1,1,1,11,12,1,3,10,1,1,13,1,6,6,12,1,9,1,1,5,1,1,1,1,1,7,8,1,5,1,8,1,13,1,1,5,1,1,1,1,10,1,1,1,1,5,6,1,1,1,1,5,9,1,4,11,1,1,8,1,1,1,1,1,4,1,5,1,1,5,1,1,1,1,4,1,1,1,1,1,1,1,4,7,11,1,1,11,1,1,1,1,1,1,3,1,1,1,1,1,10,1,1,1,1,8,1,1,1,1,1,1,1,1,1,1,1,1,1,3,1,1,1,1,9,1,7,1,1,1,1,1,1,5,1,1,1,1,6,7,1,1,1,1,9,1,1,1,1,4,5,10,1,1,1,1,1,1,1,1,1,1,8,1,12,5,1,13,3,5,5,1,1,1,1,10,1,1,1,9,9,1,1,1,1,4,6,1,8,5,1,9,13,1,1,1,1,1,1,1,11,6,1,6,5,1,10,7,1,1,10,1,11,1,1,7,4,6,12,1,1,1,1,1,13,1,1,1,3,1,1,1,13,1,1,1,7,1,11,1,6,1,1,1,1,1,1,1,1,1,4,1,7,6,1,1,1,1,1,1,1,7,1,1,1,9,13,1,1,1,1,8,1,1,1,5,1,1,1,1,1,1,1,13,1,1,1,1,6,10,1,8,1,1,12,5,1,6,1,1,1,12,1,1,1,1,13,1,6,10,4,1,9,1,1,1,1,1,13,1,11,1,7,13,8,1,5,1,1,4,1,1,1,1,1,11,8,1,11,1,1,1,1,1,5,1,8,1,1,1,1,8,5,13,1,12,1,1,4,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,4,13,1,1,1,6,1,1,1,1,1,5,1,8,6,7,12,1,12,6,1,13,1,1,1,1,1,1,3,1,7,7,1,11,1,1,1,1,1,1,1,8,1,1,1,12,1,1,1,1,1,12,1,1,1,1,1,1,1,1,1,7,1,1,1,1,1,1,1,1,13,7,5,3,1,6,1,10,11,1,1,1,8,1,5,4,11,1,1,12,1,1,1,6,1,6,10,4,13,1,1,13,1,12,1,7,1,1,1,9,1,1,1,5,1,12,11,1,1,1,1,11,1,1,1,1,1,1,6,9,1,14,7,1,1,1,11,1,1,1,1,1,9,1,1,1,1,1,1,1,1,1,1,8,13,1,1,1,6,1,10,1,1,1,1,4,1,1,9,1,1,1,1,6,1,8,1,8,1,5,1,1,9,1,1,1,1,1,1,1,1,5,1,1,4,1,1,1,1,8,1,1,1,1,12,12,13,1,1,1,1,6,1,1,1,1,1,1,13,1,1,1,1,1,1,1,8,1,1,1,1,7,5,9,1,1,1,1,10,1,1,1,1,1,1,1,5,4,9,1,4,1,1,4,12,13,9,1,10,1,11,1,1,1,6,1,1,7,1,1,7,6,1,1,1,1,12,1,1,13,8,1,9,1,1,1,1,10,6,11,11,1,1,1,1,1,1,1,4,1,7,1,1,4,1,1,1,1,1,1,1,1,1,13,6,1,13,1,7,3,1,1,12,1,8,1,1,1,1,5,1,1,1,1,1,6,1,1,1,1,1,1,13,1,1,11,1,5,1,1,1,1,1,11,1,1,1,1,7,1,13,1,1,1,5,10,13,14,7,5,1,1,1,1,1,1,4,1,11,1,1,1,5,1,9,8,1,1,1,1,1,1,1,6,1,7,1,12,1,1,8,1,1,4,1,1,9,1,1,1,1,1,7,6,7,13,1,13,1,1,1,1,1,1,1,10,1,1,11,6,1,1,7,5,1,1,10,1,9,13,1,4,1,1,1,4,1,1,10,5,1,1,10,1,9,7,7,1,7,1,1,1,9,1,1,1,1,1,1,1,1,4,7,1,1,1,6,12,1,1,8,1,1,1,1,12,5,1,1,1,9,11,1,1,1,1,1,1,1,8,12,1,1,1,1,7,1,1,1,1,1,1,1,1,1,1,1,1,4,1,1,1,1,6,1,1,9,9,1,3,1,1,1,1,1,14,1,1,1,12,1,1,6,13,7,4,
1,10,1,12,1,6,1,1,1,1,1,13,10,1,1,7,5,1,12,1,1,7,5,1,6,1,1,1,8,11,5,4,1,1,1,1,1,1,12,7,1,1,1,1,1,6,9,8,10,1,1,11,11,8,4,5,1,10,12,1,1,1,1,1,1,1,9,1,1,6,5,1,1,11,13,1,1,1,1,1,1,13,7,1,1,1,12,1,1,1,13,9,1,1,6,1,1,1,1,1,1,1,1,4,1,8,8,1,1,1,13,1,1,1,1,1,1,8,4,1,11,1,13,7,1,1,1,1,1,1,11,1,1,1,9,1,1,4,1,1,1,1,1,1,1,6,1,1,1,1,1,4,1,4,13,1,5,1,5,1,4,12,7,1,1,1,7,12,1,9,6,1,1,9,13,1,1,4,11,1,3,1,1,1,1,10,1,1,5,1,1,13,7,1,13,1,5,1,9,1,6,4,12,1,5,1,7,10,1,1,9,1,1,1,1,1,6,1,1,1,1,1,5,1,1,1,1,1,12,11,11,1,1,7,1,3,1,1,1,13,6,1,1,6,13,8,1,7,9,1,12,1,1,10,1,1,1,9,1,1,1,8,8,1,10,1,1,12,7,1,1,6,1,1,1,1,1,1,12,1,6,8,1,1,5,4,1,1,1,1,1,1,13,1,1,1,8,11,12,7,1,10,7,1,8,1,1,1,8,8,1,6,1,7,11,8,1,1,1,1,11,1,11,1,1,10,1,12,1,1,10,1,1,1,6,1,1,1,6,5,5,1,1,1,1,1,13,1,1,7,1,1,1,1,1,1,13,4,1,1,13,1,13,1,1,1,5,1,1,1,8,1,1,1,1,1,1,12,13,1,1,1,13,1,1,1,1,11,6,1,10,1,1,1,4,10,6,1,13,1,1,1,1,1,6,6,1,1,1,1,1,8,5,1,12,10,7,5,1,1,1,1,1,1,12,11,1,1,6,1,1,1,6,1,1,1,10,1,1,1,5,1,1,6,10,1,8,1,1,1,11,1,1,1,1,1,1,1,1,1,1,1,1,9,8,1,1,1,12,1,12,10,1,1,1,1,1,10,1,1,1,1,11,1,1,1,6,13,1,5,1,1,1,1,10,1,1,13,1,1,10,1,1,3,1,1,1,1,1,1,1,13,10,1,1,1,10,1,1,6,1,11,1,1,1,1,1,1,1,1,1,7,1,1,1,1,9,1,1,12,1,4,1,1,1,8,12,1,13,5,4,6,5,11,1,1,4,8,1,6,1,4,10,12,3,1,8,1,1,1,1,3,1,1,1,13,12,11,10,1,4,1,1,1,1,8,1,13,6,4,1,6,1,13,9,1,6,10,1,10,1,1,1,1,1,9,1,7,1,1,1,1,1,1,9,1,10,1,1,6,1,10,1,5,1,1,12,11,8,1,1,1,1,1,1,10,1,1,1,4,1,1,1,8,1,1,1,1,1,1,4,1,1,1,1,9,1,1,3,13,13,1,1,1,1,1,1,1,1,1,1,9,10,1,1,1,1,1,9,1,10,4,1,13,1,1,9,1,7,1,8,1,12,5,1,9,1,4,1,1,1,1,6,1,11,1,11,6,4,4,1,1,1,1,1,1,9,6,13,4,7,9,1,1,1,6,1,1,1,6,7,9,1,8,1,1,8,12,6,8,9,1,1,1,1,1,6,1,1,1,1,1,1,1,1,1,11,11,1,10,6,1,1,1,1,1,12,11,1,1,5,1,1,6,1,1,1,1,1,1,1,7,12,9,1,1,10,1,1,5,1,1,10,1,1,11,1,4,1,9,12,1,1,11,6,5,4,1,9,1,1,11,12,9,1,8,1,7,1,1,9,6,1,1,1,12,11,1,1,1,1,1,1,6,1,1,1,13,1,1,1,1,1,1,1,8,12,13,1,1,13,11,1,7,8,12,1,1,1,10,1,1,4,1,1,1,1,1,1,1,13,1,8,1,1,3,8,1,7,7,1,1,12,1,1,1,11,1,1,1,1,1,1,3,1,1,1,1,8,1,1,3,1,1,1,1,11,1,1,1,5,1,12,1,1,1,1,1,1,8,1,1,10,9,1,1,1,1,9,1,1
,1,1,1,4,1,1,1,4,1,1,7,1,10,1,8,1,4,1,13,1,1,1,11,1,5,1,9,1,4,1,1,8,1,1,1,1,11,1,1,9,1,4,6,1,1,1,13,11,10,6,1,12,1,1,1,1,1,1,1,1,12,1,1,1,10,1,1,1,1,1,1,1,1,1,13,1,6,1,1,11,1,1,1,1,1,1,6,9,1,1,1,1,8,4,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,6,1,12,1,1,12,5,1,1,1,6,1,4,4,1,13,8,1,1,1,1,1,1,1,1,1,1,1,10,1,1,11,11,1,1,1,1,1,1,6,1,1,1,10,1,12,10,1,1,12,1,1,1,1,1,1,11,1,1,7,1,1,1,9,1,1,8,1,1,1,8,1,1,1,1,1,1,1,1,1,1,13,5,8,8,1,11,1,12,1,1,10,1,1,1,11,1,1,1,11,1,1,1,8,12,11,1,13,1,10,1,8,1,1,1,6,1,1,8,1,1,1,1,1,1,1,6,4,1,7,7,1,1,1,11,5,1,12,1];function j(){for(let b=0;b<y.length;b++){y[b]==1?y[b]=e.cf.bind(e).call.bind(e.cf.bind(e),e.cf.bind(e)):y[b]==2?y[b]=e.sf.bind(e).call.bind(e.sf.bind(e),e.sf.bind(e)):y[b]==3?y[b]=e.Cf.bind(e).call.bind(e.Cf.bind(e),e.Cf.bind(e)):y[b]=e.cF.bind(e).call.bind(e.cF.bind(e),e.cF.bind(e));}}j();var x=y[0],C=y[1],F=y[2],k=y[3],L=y[4],E=y[5],R=y[6],O=y[7],N=y[8],q=y[9],A=y[10],B=y[11],D=y[12],G=y[13],H=y[14],I=y[15],J=y[16],K=y[17],M=y[18],Q=y[19],S=y[20],T=y[21],U=y[22],V=y[23],W=y[24],X=y[25],Y=y[26],Z=y[27],$=y[28],b1=y[29],e1=y[30],c1=y[31],i1=y[32],n1=y[33],l1=y[34],f1=y[35],o1=y[36],d1=y[37],s1=y[38],t1=y[39],u1=y[40],a1=y[41],r1=y[42],h1=y[43],_1=y[44],v1=y[45],P1=y[46],g1=y[47],p1=y[48],w1=y[49],z1=y[50],m1=y[51],y1=y[52],j1=y[53],x1=y[54],C1=y[55],F1=y[56],k1=y[57],L1=y[58],E1=y[59],R1=y[60],O1=y[61],N1=y[62],q1=y[63],A1=y[64],B1=y[65],D1=y[66],G1=y[67],H1=y[68],I1=y[69],J1=y[70],K1=y[71],M1=y[72],Q1=y[73],S1=y[74],T1=y[75],U1=y[76],V1=y[77],W1=y[78],X1=y[79],Y1=y[80],Z1=y[81],$1=y[82],bb=y[83],eb=y[84],cb=y[85],ib=y[86],nb=y[87],lb=y[88],fb=y[89],ob=y[90],db=y[91],sb=y[92],tb=y[93],ub=y[94],ab=y[95],rb=y[96],hb=y[97],_b=y[98],vb=y[99],Pb=y[100],gb=y[101],pb=y[102],wb=y[103],zb=y[104],mb=y[105],yb=y[106],jb=y[107],xb=y[108],Cb=y[109],Fb=y[110],kb=y[111],Lb=y[112],Eb=y[113],Rb=y[114],Ob=y[115],Nb=y[116],qb=y[117],Ab=y[118],Bb=y[119],Db=y[120],Gb=y[121],Hb=y[122],Ib=y[123],Jb=y[124],Kb=y[125],Mb=y[126],Qb=y[127],Sb=y[128],Tb=y[129],Ub=y[
130],Vb=y[131],Wb=y[132],Xb=y[133],Yb=y[134],Zb=y[135],$b=y[136],be=y[137],ee=y[138],ce=y[139],ie=y[140],ne=y[141],le=y[142],fe=y[143],oe=y[144],de=y[145],se=y[146],te=y[147],ue=y[148],ae=y[149],re=y[150],he=y[151],_e=y[152],ve=y[153],Pe=y[154],ge=y[155],pe=y[156],we=y[157],ze=y[158],me=y[159],ye=y[160],je=y[161],xe=y[162],Ce=y[163],Fe=y[164],ke=y[165],Le=y[166],Ee=y[167],Re=y[168],Oe=y[169],Ne=y[170],qe=y[171],Ae=y[172],Be=y[173],De=y[174],Ge=y[175],He=y[176],Ie=y[177],Je=y[178],Ke=y[179],Me=y[180],Qe=y[181],Se=y[182],Te=y[183],Ue=y[184],Ve=y[185],We=y[186],Xe=y[187],Ye=y[188],Ze=y[189],$e=y[190],b2=y[191],e2=y[192],c2=y[193],i2=y[194],n2=y[195],l2=y[196],f2=y[197],o2=y[198],d2=y[199],s2=y[200],t2=y[201],u2=y[202],a2=y[203],r2=y[204],h2=y[205],_2=y[206],v2=y[207],P2=y[208],g2=y[209],p2=y[210],w2=y[211],z2=y[212],m2=y[213],y2=y[214],j2=y[215],x2=y[216],C2=y[217],F2=y[218],k2=y[219],L2=y[220],E2=y[221],R2=y[222],O2=y[223],N2=y[224],q2=y[225],A2=y[226],B2=y[227],D2=y[228],G2=y[229],H2=y[230],I2=y[231],J2=y[232],K2=y[233],M2=y[234],Q2=y[235],S2=y[236],T2=y[237],U2=y[238],V2=y[239],W2=y[240],X2=y[241],Y2=y[242],Z2=y[243],$2=y[244],b3=y[245],e3=y[246],c3=y[247],i3=y[248],n3=y[249],l3=y[250],f3=y[251],o3=y[252],d3=y[253],s3=y[254],t3=y[255],u3=y[256],a3=y[257],r3=y[258],h3=y[259],_3=y[260],v3=y[261],P3=y[262],g3=y[263],p3=y[264],w3=y[265],z3=y[266],m3=y[267],y3=y[268],j3=y[269],x3=y[270],C3=y[271],F3=y[272],k3=y[273],L3=y[274],E3=y[275],R3=y[276],O3=y[277],N3=y[278],q3=y[279],A3=y[280],B3=y[281],D3=y[282],G3=y[283],H3=y[284],I3=y[285],J3=y[286],K3=y[287],M3=y[288],Q3=y[289],S3=y[290],T3=y[291],U3=y[292],V3=y[293],W3=y[294],X3=y[295],Y3=y[296],Z3=y[297],$3=y[298],bc=y[299],ec=y[300],cc=y[301],ic=y[302],nc=y[303],lc=y[304],fc=y[305],oc=y[306],dc=y[307],sc=y[308],tc=y[309],uc=y[310],ac=y[311],rc=y[312],hc=y[313],_c=y[314],vc=y[315],Pc=y[316],gc=y[317],pc=y[318],wc=y[319],zc=y[320],mc=y[321],yc=y[322],jc=y[323],xc=y[324],Cc=y[325],Fc=y[326],kc=y[327],Lc=y[328],Ec=y[329],Rc=y[
330],Oc=y[331],Nc=y[332],qc=y[333],Ac=y[334],Bc=y[335],Dc=y[336],Gc=y[337],Hc=y[338],Ic=y[339],Jc=y[340],Kc=y[341],Mc=y[342],Qc=y[343],Sc=y[344],Tc=y[345],Uc=y[346],Vc=y[347],Wc=y[348],Xc=y[349],Yc=y[350],Zc=y[351],$c=y[352],bi=y[353],ei=y[354],ci=y[355],ii=y[356],ni=y[357],li=y[358],fi=y[359],oi=y[360],di=y[361],si=y[362],ti=y[363],ui=y[364],ai=y[365],ri=y[366],hi=y[367],_i=y[368],vi=y[369],Pi=y[370],gi=y[371],pi=y[372],wi=y[373],zi=y[374],mi=y[375],yi=y[376],ji=y[377],xi=y[378],Ci=y[379],Fi=y[380],ki=y[381],Li=y[382],Ei=y[383],Ri=y[384],Oi=y[385],Ni=y[386],qi=y[387],Ai=y[388],Bi=y[389],Di=y[390],Gi=y[391],Hi=y[392],Ii=y[393],Ji=y[394],Ki=y[395],Mi=y[396],Qi=y[397],Si=y[398],Ti=y[399],Ui=y[400],Vi=y[401],Wi=y[402],Xi=y[403],Yi=y[404],Zi=y[405],$i=y[406],bn=y[407],en=y[408],cn=y[409],nn=y[410],ln=y[411],fn=y[412],on=y[413],dn=y[414],sn=y[415],tn=y[416],un=y[417],an=y[418],rn=y[419],hn=y[420],_n=y[421],vn=y[422],Pn=y[423],gn=y[424],pn=y[425],wn=y[426],zn=y[427],mn=y[428],yn=y[429],jn=y[430],xn=y[431],Cn=y[432],Fn=y[433],kn=y[434],Ln=y[435],En=y[436],Rn=y[437],On=y[438],Nn=y[439],qn=y[440],An=y[441],Bn=y[442],Dn=y[443],Gn=y[444],Hn=y[445],In=y[446],Jn=y[447],Kn=y[448],Mn=y[449],Qn=y[450],Sn=y[451],Tn=y[452],Un=y[453],Vn=y[454],Wn=y[455],Xn=y[456],Yn=y[457],Zn=y[458],$n=y[459],b0=y[460],e0=y[461],c0=y[462],i0=y[463],n0=y[464],l0=y[465],f0=y[466],o0=y[467],d0=y[468],s0=y[469],t0=y[470],u0=y[471],a0=y[472],r0=y[473],h0=y[474],_0=y[475],v0=y[476],P0=y[477],g0=y[478],p0=y[479],w0=y[480],z0=y[481],m0=y[482],y0=y[483],j0=y[484],x0=y[485],C0=y[486],F0=y[487],k0=y[488],L0=y[489],E0=y[490],R0=y[491],O0=y[492],N0=y[493],q0=y[494],A0=y[495],B0=y[496],D0=y[497],G0=y[498],H0=y[499],I0=y[500],J0=y[501],K0=y[502],M0=y[503],Q0=y[504],S0=y[505],T0=y[506],U0=y[507],V0=y[508],W0=y[509],X0=y[510],Y0=y[511],Z0=y[512],$0=y[513],b7=y[514],e7=y[515],c7=y[516],i7=y[517],n7=y[518],l7=y[519],f7=y[520],o7=y[521],d7=y[522],s7=y[523],t7=y[524],u7=y[525],a7=y[526],r7=y[527],h7=y[528],_7=y[529],v7=y[
530],P7=y[531],g7=y[532],p7=y[533],w7=y[534],z7=y[535],m7=y[536],y7=y[537],j7=y[538],x7=y[539],C7=y[540],F7=y[541],k7=y[542],L7=y[543],E7=y[544],R7=y[545],O7=y[546],N7=y[547],q7=y[548],A7=y[549],B7=y[550],D7=y[551],G7=y[552],H7=y[553],I7=y[554],J7=y[555],K7=y[556],M7=y[557],Q7=y[558],S7=y[559],T7=y[560],U7=y[561],V7=y[562],W7=y[563],X7=y[564],Y7=y[565],Z7=y[566],$7=y[567],b5=y[568],e5=y[569],c5=y[570],i5=y[571],n5=y[572],l5=y[573],f5=y[574],o5=y[575],d5=y[576],s5=y[577],t5=y[578],u5=y[579],a5=y[580],r5=y[581],h5=y[582],_5=y[583],v5=y[584],P5=y[585],g5=y[586],p5=y[587],w5=y[588],z5=y[589],m5=y[590],y5=y[591],j5=y[592],x5=y[593],C5=y[594],F5=y[595],k5=y[596],L5=y[597],E5=y[598],R5=y[599],O5=y[600],N5=y[601],q5=y[602],A5=y[603],B5=y[604],D5=y[605],G5=y[606],H5=y[607],I5=y[608],J5=y[609],K5=y[610],M5=y[611],Q5=y[612],S5=y[613],T5=y[614],U5=y[615],V5=y[616],W5=y[617],X5=y[618],Y5=y[619],Z5=y[620],$5=y[621],b8=y[622],e8=y[623],c8=y[624],i8=y[625],n8=y[626],l8=y[627],f8=y[628],o8=y[629],d8=y[630],s8=y[631],t8=y[632],u8=y[633],a8=y[634],r8=y[635],h8=y[636],_8=y[637],v8=y[638],P8=y[639],g8=y[640],p8=y[641],w8=y[642],z8=y[643],m8=y[644],y8=y[645],j8=y[646],x8=y[647],C8=y[648],F8=y[649],k8=y[650],L8=y[651],E8=y[652],R8=y[653],O8=y[654],N8=y[655],q8=y[656],A8=y[657],B8=y[658],D8=y[659],G8=y[660],H8=y[661],I8=y[662],J8=y[663],K8=y[664],M8=y[665],Q8=y[666],S8=y[667],T8=y[668],U8=y[669],V8=y[670],W8=y[671],X8=y[672],Y8=y[673],Z8=y[674],$8=y[675],b6=y[676],e6=y[677],c6=y[678],i6=y[679],n6=y[680],l6=y[681],f6=y[682],o6=y[683],d6=y[684],s6=y[685],t6=y[686],u6=y[687],a6=y[688],r6=y[689],h6=y[690],_6=y[691],v6=y[692],P6=y[693],g6=y[694],p6=y[695],w6=y[696],z6=y[697],m6=y[698],y6=y[699],j6=y[700],x6=y[701],C6=y[702],F6=y[703],k6=y[704],L6=y[705],E6=y[706],R6=y[707],O6=y[708],N6=y[709],q6=y[710],A6=y[711],B6=y[712],D6=y[713],G6=y[714],H6=y[715],I6=y[716],J6=y[717],K6=y[718],M6=y[719],Q6=y[720],S6=y[721],T6=y[722],U6=y[723],V6=y[724],W6=y[725],X6=y[726],Y6=y[727],Z6=y[728],$6=y[729],b9=y[
730],e9=y[731],c9=y[732],i9=y[733],n9=y[734],l9=y[735],f9=y[736],o9=y[737],d9=y[738],s9=y[739],t9=y[740],u9=y[741],a9=y[742],r9=y[743],h9=y[744],_9=y[745],v9=y[746],P9=y[747],g9=y[748],p9=y[749],w9=y[750],z9=y[751],m9=y[752],y9=y[753],j9=y[754],x9=y[755],C9=y[756],F9=y[757],k9=y[758],L9=y[759],E9=y[760],R9=y[761],O9=y[762],N9=y[763],q9=y[764],A9=y[765],B9=y[766],D9=y[767],G9=y[768],H9=y[769],I9=y[770],J9=y[771],K9=y[772],M9=y[773],Q9=y[774],S9=y[775],T9=y[776],U9=y[777],V9=y[778],W9=y[779],X9=y[780],Y9=y[781],Z9=y[782],$9=y[783],b4=y[784],e4=y[785],c4=y[786],i4=y[787],n4=y[788],l4=y[789],f4=y[790],o4=y[791],d4=y[792],s4=y[793],t4=y[794],u4=y[795],a4=y[796],r4=y[797],h4=y[798],_4=y[799],v4=y[800],P4=y[801],g4=y[802],p4=y[803],w4=y[804],z4=y[805],m4=y[806],y4=y[807],j4=y[808],x4=y[809],C4=y[810],F4=y[811],k4=y[812],L4=y[813],E4=y[814],R4=y[815],O4=y[816],N4=y[817],q4=y[818],A4=y[819],B4=y[820],D4=y[821],G4=y[822],H4=y[823],I4=y[824],J4=y[825],K4=y[826],M4=y[827],Q4=y[828],S4=y[829],T4=y[830],U4=y[831],V4=y[832],W4=y[833],X4=y[834],Y4=y[835],Z4=y[836],$4=y[837],bl=y[838],el=y[839],cl=y[840],il=y[841],nl=y[842],ll=y[843],fl=y[844],ol=y[845],dl=y[846],sl=y[847],tl=y[848],ul=y[849],al=y[850],rl=y[851],hl=y[852],_l=y[853],vl=y[854],Pl=y[855],gl=y[856],pl=y[857],wl=y[858],zl=y[859],ml=y[860],yl=y[861],jl=y[862],xl=y[863],Cl=y[864],Fl=y[865],kl=y[866],Ll=y[867],El=y[868],Rl=y[869],Ol=y[870],Nl=y[871],ql=y[872],Al=y[873],Bl=y[874],Dl=y[875],Gl=y[876],Hl=y[877],Il=y[878],Jl=y[879],Kl=y[880],Ml=y[881],Ql=y[882],Sl=y[883],Tl=y[884],Ul=y[885],Vl=y[886],Wl=y[887],Xl=y[888],Yl=y[889],Zl=y[890],$l=y[891],bf=y[892],ef=y[893],cf=y[894],nf=y[895],lf=y[896],ff=y[897],of=y[898],df=y[899],sf=y[900],tf=y[901],uf=y[902],af=y[903],rf=y[904],hf=y[905],_f=y[906],vf=y[907],Pf=y[908],gf=y[909],pf=y[910],wf=y[911],zf=y[912],mf=y[913],yf=y[914],jf=y[915],xf=y[916],Cf=y[917],Ff=y[918],kf=y[919],Lf=y[920],Ef=y[921],Rf=y[922],Of=y[923],Nf=y[924],qf=y[925],Af=y[926],Bf=y[927],Df=y[928],Gf=y[929],Hf=y[
930],If=y[931],Jf=y[932],Kf=y[933],Mf=y[934],Qf=y[935],Sf=y[936],Tf=y[937],Uf=y[938],Vf=y[939],Wf=y[940],Xf=y[941],Yf=y[942],Zf=y[943],$f=y[944],bo=y[945],eo=y[946],co=y[947],io=y[948],no=y[949],lo=y[950],fo=y[951],oo=y[952],so=y[953],to=y[954],uo=y[955],ao=y[956],ro=y[957],ho=y[958],_o=y[959],vo=y[960],Po=y[961],go=y[962],po=y[963],wo=y[964],zo=y[965],mo=y[966],yo=y[967],jo=y[968],xo=y[969],Co=y[970],Fo=y[971],ko=y[972],Lo=y[973],Eo=y[974],Ro=y[975],Oo=y[976],No=y[977],qo=y[978],Ao=y[979],Bo=y[980],Do=y[981],Go=y[982],Ho=y[983],Io=y[984],Jo=y[985],Ko=y[986],Mo=y[987],Qo=y[988],So=y[989],To=y[990],Uo=y[991],Vo=y[992],Wo=y[993],Xo=y[994],Yo=y[995],Zo=y[996],$o=y[997],bd=y[998],ed=y[999],cd=y[1e3],id=y[1001],nd=y[1002],ld=y[1003],fd=y[1004],od=y[1005],dd=y[1006],sd=y[1007],td=y[1008],ud=y[1009],ad=y[1010],rd=y[1011],hd=y[1012],_d=y[1013],vd=y[1014],Pd=y[1015],gd=y[1016],pd=y[1017],wd=y[1018],zd=y[1019],md=y[1020],yd=y[1021],jd=y[1022],xd=y[1023],Cd=y[1024],Fd=y[1025],kd=y[1026],Ld=y[1027],Ed=y[1028],Rd=y[1029],Od=y[1030],Nd=y[1031],qd=y[1032],Ad=y[1033],Bd=y[1034],Dd=y[1035],Gd=y[1036],Hd=y[1037],Id=y[1038],Jd=y[1039],Kd=y[1040],Md=y[1041],Qd=y[1042],Sd=y[1043],Td=y[1044],Ud=y[1045],Vd=y[1046],Wd=y[1047],Xd=y[1048],Yd=y[1049],Zd=y[1050],$d=y[1051],bs=y[1052],es=y[1053],cs=y[1054],is=y[1055],ns=y[1056],ls=y[1057],fs=y[1058],os=y[1059],ds=y[1060],ss=y[1061],ts=y[1062],us=y[1063],as=y[1064],rs=y[1065],hs=y[1066],_s=y[1067],vs=y[1068],Ps=y[1069],gs=y[1070],ps=y[1071],ws=y[1072],zs=y[1073],ms=y[1074],ys=y[1075],js=y[1076],xs=y[1077],Cs=y[1078],Fs=y[1079],ks=y[1080],Ls=y[1081],Es=y[1082],Rs=y[1083],Os=y[1084],Ns=y[1085],qs=y[1086],As=y[1087],Bs=y[1088],Ds=y[1089],Gs=y[1090],Hs=y[1091],Is=y[1092],Js=y[1093],Ks=y[1094],Ms=y[1095],Qs=y[1096],Ss=y[1097],Ts=y[1098],Us=y[1099],Vs=y[1100],Ws=y[1101],Xs=y[1102],Ys=y[1103],Zs=y[1104],$s=y[1105],bt=y[1106],et=y[1107],ct=y[1108],it=y[1109],nt=y[1110],lt=y[1111],ft=y[1112],ot=y[1113],dt=y[1114],st=y[1115],tt=y[1116],ut=y[1117],at=y[111
8],rt=y[1119],ht=y[1120],_t=y[1121],vt=y[1122],Pt=y[1123],gt=y[1124],pt=y[1125],wt=y[1126],zt=y[1127],mt=y[1128],yt=y[1129],jt=y[1130],xt=y[1131],Ct=y[1132],Ft=y[1133],kt=y[1134],Lt=y[1135],Et=y[1136],Rt=y[1137],Ot=y[1138],Nt=y[1139],qt=y[1140],At=y[1141],Bt=y[1142],Dt=y[1143],Gt=y[1144],Ht=y[1145],It=y[1146],Jt=y[1147],Kt=y[1148],Mt=y[1149],Qt=y[1150],St=y[1151],Tt=y[1152],Ut=y[1153],Vt=y[1154],Wt=y[1155],Xt=y[1156],Yt=y[1157],Zt=y[1158],$t=y[1159],bu=y[1160],eu=y[1161],cu=y[1162],iu=y[1163],nu=y[1164],lu=y[1165],fu=y[1166],ou=y[1167],du=y[1168],su=y[1169],tu=y[1170],uu=y[1171],au=y[1172],ru=y[1173],hu=y[1174],_u=y[1175],vu=y[1176],Pu=y[1177],gu=y[1178],pu=y[1179],wu=y[1180],zu=y[1181],mu=y[1182],yu=y[1183],ju=y[1184],xu=y[1185],Cu=y[1186],Fu=y[1187],ku=y[1188],Lu=y[1189],Eu=y[1190],Ru=y[1191],Ou=y[1192],Nu=y[1193],qu=y[1194],Au=y[1195],Bu=y[1196],Du=y[1197],Gu=y[1198],Hu=y[1199],Iu=y[1200],Ju=y[1201],Ku=y[1202],Mu=y[1203],Qu=y[1204],Su=y[1205],Tu=y[1206],Uu=y[1207],Vu=y[1208],Wu=y[1209],Xu=y[1210],Yu=y[1211],Zu=y[1212],$u=y[1213],ba=y[1214],ea=y[1215],ca=y[1216],ia=y[1217],na=y[1218],la=y[1219],fa=y[1220],oa=y[1221],da=y[1222],sa=y[1223],ta=y[1224],ua=y[1225],aa=y[1226],ra=y[1227],ha=y[1228],_a=y[1229],va=y[1230],Pa=y[1231],ga=y[1232],pa=y[1233],wa=y[1234],za=y[1235],ma=y[1236],ya=y[1237],ja=y[1238],xa=y[1239],Ca=y[1240],Fa=y[1241],ka=y[1242],La=y[1243],Ea=y[1244],Ra=y[1245],Oa=y[1246],Na=y[1247],qa=y[1248],Aa=y[1249],Ba=y[1250],Da=y[1251],Ga=y[1252],Ha=y[1253],Ia=y[1254],Ja=y[1255],Ka=y[1256],Ma=y[1257],Qa=y[1258],Sa=y[1259],Ta=y[1260],Ua=y[1261],Va=y[1262],Wa=y[1263],Xa=y[1264],Ya=y[1265],Za=y[1266],$a=y[1267],br=y[1268],er=y[1269],cr=y[1270],ir=y[1271],nr=y[1272],lr=y[1273],fr=y[1274],or=y[1275],dr=y[1276],sr=y[1277],tr=y[1278],ur=y[1279],ar=y[1280],rr=y[1281],hr=y[1282],_r=y[1283],vr=y[1284],Pr=y[1285],gr=y[1286],pr=y[1287],wr=y[1288],zr=y[1289],mr=y[1290],yr=y[1291],jr=y[1292],xr=y[1293],Cr=y[1294],Fr=y[1295],kr=y[1296],Lr=y[1297],Er=y[1298],Rr=y[1299],Or=y[1
300],Nr=y[1301],qr=y[1302],Ar=y[1303],Br=y[1304],Dr=y[1305],Gr=y[1306],Hr=y[1307],Ir=y[1308],Jr=y[1309],Kr=y[1310],Mr=y[1311],Qr=y[1312],Sr=y[1313],Tr=y[1314],Ur=y[1315],Vr=y[1316],Wr=y[1317],Xr=y[1318],Yr=y[1319],Zr=y[1320],$r=y[1321],bh=y[1322],eh=y[1323],ch=y[1324],ih=y[1325],nh=y[1326],lh=y[1327],fh=y[1328],oh=y[1329],dh=y[1330],sh=y[1331],th=y[1332],uh=y[1333],ah=y[1334],rh=y[1335],hh=y[1336],_h=y[1337],vh=y[1338],Ph=y[1339],gh=y[1340],ph=y[1341],wh=y[1342],zh=y[1343],mh=y[1344],yh=y[1345],jh=y[1346],xh=y[1347],Ch=y[1348],Fh=y[1349],kh=y[1350],Lh=y[1351],Eh=y[1352],Rh=y[1353],Oh=y[1354],Nh=y[1355],qh=y[1356],Ah=y[1357],Bh=y[1358],Dh=y[1359],Gh=y[1360],Hh=y[1361],Ih=y[1362],Jh=y[1363],Kh=y[1364],Mh=y[1365],Qh=y[1366],Sh=y[1367],Th=y[1368],Uh=y[1369],Vh=y[1370],Wh=y[1371],Xh=y[1372],Yh=y[1373],Zh=y[1374],$h=y[1375],b_=y[1376],e_=y[1377],c_=y[1378],i_=y[1379],n_=y[1380],l_=y[1381],f_=y[1382],o_=y[1383],d_=y[1384],s_=y[1385],t_=y[1386],u_=y[1387],a_=y[1388],r_=y[1389],h_=y[1390],__=y[1391],v_=y[1392],P_=y[1393],g_=y[1394],p_=y[1395],w_=y[1396],z_=y[1397],m_=y[1398],y_=y[1399],j_=y[1400],x_=y[1401],C_=y[1402],F_=y[1403],k_=y[1404],L_=y[1405],E_=y[1406],R_=y[1407],O_=y[1408],N_=y[1409],q_=y[1410],A_=y[1411],B_=y[1412],D_=y[1413],G_=y[1414],H_=y[1415],I_=y[1416],J_=y[1417],K_=y[1418],M_=y[1419],Q_=y[1420],S_=y[1421],T_=y[1422],U_=y[1423],V_=y[1424],W_=y[1425],X_=y[1426],Y_=y[1427],Z_=y[1428],$_=y[1429],bv=y[1430],ev=y[1431],cv=y[1432],iv=y[1433],nv=y[1434],lv=y[1435],fv=y[1436],ov=y[1437],dv=y[1438],sv=y[1439],tv=y[1440],uv=y[1441],av=y[1442],rv=y[1443],hv=y[1444],_v=y[1445],vv=y[1446],Pv=y[1447],gv=y[1448],pv=y[1449],wv=y[1450],zv=y[1451],mv=y[1452],yv=y[1453],jv=y[1454],xv=y[1455],Cv=y[1456],Fv=y[1457],kv=y[1458],Lv=y[1459],Ev=y[1460],Rv=y[1461],Ov=y[1462],Nv=y[1463],qv=y[1464],Av=y[1465],Bv=y[1466],Dv=y[1467],Gv=y[1468],Hv=y[1469],Iv=y[1470],Jv=y[1471],Kv=y[1472],Mv=y[1473],Qv=y[1474],Sv=y[1475],Tv=y[1476],Uv=y[1477],Vv=y[1478],Wv=y[1479],Xv=y[1480],Yv=y[1481],Zv=y
[1482],$v=y[1483],bP=y[1484],eP=y[1485],cP=y[1486],iP=y[1487],nP=y[1488],lP=y[1489],fP=y[1490],oP=y[1491],dP=y[1492],sP=y[1493],tP=y[1494],uP=y[1495],aP=y[1496],rP=y[1497],hP=y[1498],_P=y[1499],vP=y[1500],PP=y[1501],gP=y[1502],pP=y[1503],wP=y[1504],zP=y[1505],mP=y[1506],yP=y[1507],jP=y[1508],xP=y[1509],CP=y[1510],FP=y[1511],kP=y[1512],LP=y[1513],EP=y[1514],RP=y[1515],OP=y[1516],NP=y[1517],qP=y[1518],AP=y[1519],BP=y[1520],DP=y[1521],GP=y[1522],HP=y[1523],IP=y[1524],JP=y[1525],KP=y[1526],MP=y[1527],QP=y[1528],SP=y[1529],TP=y[1530],UP=y[1531],VP=y[1532],WP=y[1533],XP=y[1534],YP=y[1535],ZP=y[1536],$P=y[1537],bg=y[1538],eg=y[1539],cg=y[1540],ig=y[1541],ng=y[1542],lg=y[1543],fg=y[1544],og=y[1545],dg=y[1546],sg=y[1547],tg=y[1548],ug=y[1549],ag=y[1550],rg=y[1551],hg=y[1552],_g=y[1553],vg=y[1554],Pg=y[1555],gg=y[1556],pg=y[1557],wg=y[1558],zg=y[1559],mg=y[1560],yg=y[1561],jg=y[1562],xg=y[1563],Cg=y[1564],Fg=y[1565],kg=y[1566],Lg=y[1567],Eg=y[1568],Rg=y[1569],Og=y[1570],Ng=y[1571],qg=y[1572],Ag=y[1573],Bg=y[1574],Dg=y[1575],Gg=y[1576],Hg=y[1577],Ig=y[1578],Jg=y[1579],Kg=y[1580],Mg=y[1581],Qg=y[1582],Sg=y[1583],Tg=y[1584],Ug=y[1585],Vg=y[1586],Wg=y[1587],Xg=y[1588],Yg=y[1589],Zg=y[1590],$g=y[1591],bp=y[1592],ep=y[1593],cp=y[1594],ip=y[1595],np=y[1596],lp=y[1597],fp=y[1598],op=y[1599],dp=y[1600],sp=y[1601],tp=y[1602],up=y[1603],ap=y[1604],rp=y[1605],hp=y[1606],_p=y[1607],vp=y[1608],Pp=y[1609],gp=y[1610],pp=y[1611],wp=y[1612],zp=y[1613],mp=y[1614],yp=y[1615],jp=y[1616],xp=y[1617],Cp=y[1618],Fp=y[1619],kp=y[1620],Lp=y[1621],Ep=y[1622],Rp=y[1623],Op=y[1624],Np=y[1625],qp=y[1626],Ap=y[1627],Bp=y[1628],Dp=y[1629],Gp=y[1630],Hp=y[1631],Ip=y[1632],Jp=y[1633],Kp=y[1634],Mp=y[1635],Qp=y[1636],Sp=y[1637],Tp=y[1638],Up=y[1639],Vp=y[1640],Wp=y[1641],Xp=y[1642],Yp=y[1643],Zp=y[1644],$p=y[1645],bw=y[1646],ew=y[1647],cw=y[1648],iw=y[1649],nw=y[1650],lw=y[1651],fw=y[1652],ow=y[1653],dw=y[1654],sw=y[1655],tw=y[1656],uw=y[1657],aw=y[1658],rw=y[1659],hw=y[1660],_w=y[1661],vw=y[1662],Pw=y[1663],gw
=y[1664],pw=y[1665],ww=y[1666],zw=y[1667],mw=y[1668],yw=y[1669],jw=y[1670],xw=y[1671],Cw=y[1672],Fw=y[1673],kw=y[1674],Lw=y[1675],Ew=y[1676],Rw=y[1677],Ow=y[1678],Nw=y[1679],qw=y[1680],Aw=y[1681],Bw=y[1682],Dw=y[1683],Gw=y[1684],Hw=y[1685],Iw=y[1686],Jw=y[1687],Kw=y[1688],Mw=y[1689],Qw=y[1690],Sw=y[1691],Tw=y[1692],Uw=y[1693],Vw=y[1694],Ww=y[1695],Xw=y[1696],Yw=y[1697],Zw=y[1698],$w=y[1699],bz=y[1700],ez=y[1701],cz=y[1702],iz=y[1703],nz=y[1704],lz=y[1705],fz=y[1706],oz=y[1707],dz=y[1708],sz=y[1709],tz=y[1710],uz=y[1711],az=y[1712],rz=y[1713],hz=y[1714],_z=y[1715],vz=y[1716],Pz=y[1717],gz=y[1718],pz=y[1719],wz=y[1720],zz=y[1721],mz=y[1722],yz=y[1723],jz=y[1724],xz=y[1725],Cz=y[1726],Fz=y[1727],kz=y[1728],Lz=y[1729],Ez=y[1730],Rz=y[1731],Oz=y[1732],Nz=y[1733],qz=y[1734],Az=y[1735],Bz=y[1736],Dz=y[1737],Gz=y[1738],Hz=y[1739],Iz=y[1740],Jz=y[1741],Kz=y[1742],Mz=y[1743],Qz=y[1744],Sz=y[1745],Tz=y[1746],Uz=y[1747],Vz=y[1748],Wz=y[1749],Xz=y[1750],Yz=y[1751],Zz=y[1752],$z=y[1753],bm=y[1754],em=y[1755],cm=y[1756],im=y[1757],nm=y[1758],lm=y[1759],fm=y[1760],om=y[1761],dm=y[1762],sm=y[1763],tm=y[1764],um=y[1765],am=y[1766],rm=y[1767],hm=y[1768],_m=y[1769],vm=y[1770],Pm=y[1771],gm=y[1772],pm=y[1773],wm=y[1774],zm=y[1775],mm=y[1776],ym=y[1777],jm=y[1778],xm=y[1779],Cm=y[1780],Fm=y[1781],km=y[1782],Lm=y[1783],Em=y[1784],Rm=y[1785],Om=y[1786],Nm=y[1787],qm=y[1788],Am=y[1789],Bm=y[1790],Dm=y[1791],Gm=y[1792],Hm=y[1793],Im=y[1794],Jm=y[1795],Km=y[1796],Mm=y[1797],Qm=y[1798],Sm=y[1799],Tm=y[1800],Um=y[1801],Vm=y[1802],Wm=y[1803],Xm=y[1804],Ym=y[1805],Zm=y[1806],$m=y[1807],by=y[1808],ey=y[1809],cy=y[1810],iy=y[1811],ny=y[1812],ly=y[1813],fy=y[1814],oy=y[1815],dy=y[1816],sy=y[1817],ty=y[1818],uy=y[1819],ay=y[1820],ry=y[1821],hy=y[1822],_y=y[1823],vy=y[1824],Py=y[1825],gy=y[1826],py=y[1827],wy=y[1828],zy=y[1829],my=y[1830],yy=y[1831],jy=y[1832],xy=y[1833],Cy=y[1834],Fy=y[1835],ky=y[1836],Ly=y[1837],Ey=y[1838],Ry=y[1839],Oy=y[1840],Ny=y[1841],qy=y[1842],Ay=y[1843],By=y[1844],Dy=y[1845],
Gy=y[1846],Hy=y[1847],Iy=y[1848],Jy=y[1849],Ky=y[1850],My=y[1851],Qy=y[1852],Sy=y[1853],Ty=y[1854],Uy=y[1855],Vy=y[1856],Wy=y[1857],Xy=y[1858],Yy=y[1859],Zy=y[1860],$y=y[1861],bj=y[1862],ej=y[1863],cj=y[1864],ij=y[1865],nj=y[1866],lj=y[1867],fj=y[1868],oj=y[1869],dj=y[1870],sj=y[1871],tj=y[1872],uj=y[1873],aj=y[1874],rj=y[1875],hj=y[1876],_j=y[1877],vj=y[1878],Pj=y[1879],gj=y[1880],pj=y[1881],wj=y[1882],zj=y[1883],mj=y[1884],yj=y[1885],jj=y[1886],xj=y[1887],Cj=y[1888],Fj=y[1889],kj=y[1890],Lj=y[1891],Ej=y[1892],Rj=y[1893],Oj=y[1894],Nj=y[1895],qj=y[1896],Aj=y[1897],Bj=y[1898],Dj=y[1899],Gj=y[1900],Hj=y[1901],Ij=y[1902],Jj=y[1903],Kj=y[1904],Mj=y[1905],Qj=y[1906],Sj=y[1907],Tj=y[1908],Uj=y[1909],Vj=y[1910],Wj=y[1911],Xj=y[1912],Yj=y[1913],Zj=y[1914],$j=y[1915],bx=y[1916],ex=y[1917],cx=y[1918],ix=y[1919],nx=y[1920],lx=y[1921],fx=y[1922],ox=y[1923],dx=y[1924],sx=y[1925],tx=y[1926],ux=y[1927],ax=y[1928],rx=y[1929],hx=y[1930],_x=y[1931],vx=y[1932],Px=y[1933],gx=y[1934],px=y[1935],wx=y[1936],zx=y[1937],mx=y[1938],yx=y[1939],jx=y[1940],xx=y[1941],Cx=y[1942],Fx=y[1943],kx=y[1944],Lx=y[1945],Ex=y[1946],Rx=y[1947],Ox=y[1948],Nx=y[1949],qx=y[1950],Ax=y[1951],Bx=y[1952],Dx=y[1953],Gx=y[1954],Hx=y[1955],Ix=y[1956],Jx=y[1957],Kx=y[1958],Mx=y[1959],Qx=y[1960],Sx=y[1961],Tx=y[1962],Ux=y[1963],Vx=y[1964],Wx=y[1965],Xx=y[1966],Yx=y[1967],Zx=y[1968],$x=y[1969],bC=y[1970],eC=y[1971],cC=y[1972],iC=y[1973],nC=y[1974],lC=y[1975],fC=y[1976],oC=y[1977],dC=y[1978],sC=y[1979],tC=y[1980],uC=y[1981],aC=y[1982],rC=y[1983],hC=y[1984],_C=y[1985],vC=y[1986],PC=y[1987],gC=y[1988],pC=y[1989],wC=y[1990],zC=y[1991],mC=y[1992],yC=y[1993],jC=y[1994],xC=y[1995],CC=y[1996],FC=y[1997],kC=y[1998],LC=y[1999],EC=y[2e3],RC=y[2001],OC=y[2002],NC=y[2003],qC=y[2004],AC=y[2005],BC=y[2006],DC=y[2007],GC=y[2008],HC=y[2009],IC=y[2010],JC=y[2011],KC=y[2012],MC=y[2013],QC=y[2014],SC=y[2015],TC=y[2016],UC=y[2017],VC=y[2018],WC=y[2019],XC=y[2020],YC=y[2021],ZC=y[2022],$C=y[2023],bF=y[2024],eF=y[2025],cF=y[2026],iF=y[2027]
,nF=y[2028],lF=y[2029],fF=y[2030],oF=y[2031],dF=y[2032],sF=y[2033],tF=y[2034],uF=y[2035],aF=y[2036],rF=y[2037],hF=y[2038],_F=y[2039],vF=y[2040],PF=y[2041],gF=y[2042],pF=y[2043],wF=y[2044],zF=y[2045],mF=y[2046],yF=y[2047],jF=y[2048],xF=y[2049],CF=y[2050],FF=y[2051],kF=y[2052],LF=y[2053],EF=y[2054],RF=y[2055],OF=y[2056],NF=y[2057],qF=y[2058],AF=y[2059],BF=y[2060],DF=y[2061],GF=y[2062],HF=y[2063],IF=y[2064],JF=y[2065],KF=y[2066],MF=y[2067],QF=y[2068],SF=y[2069],TF=y[2070],UF=y[2071],VF=y[2072],WF=y[2073],XF=y[2074],YF=y[2075],ZF=y[2076],$F=y[2077],bk=y[2078],ek=y[2079],ck=y[2080],ik=y[2081],nk=y[2082],lk=y[2083],fk=y[2084],ok=y[2085],dk=y[2086],sk=y[2087],tk=y[2088],uk=y[2089],ak=y[2090],rk=y[2091],hk=y[2092],_k=y[2093],vk=y[2094],Pk=y[2095],gk=y[2096],pk=y[2097],wk=y[2098],zk=y[2099],mk=y[2100],yk=y[2101],jk=y[2102],xk=y[2103],Ck=y[2104],Fk=y[2105],kk=y[2106],Lk=y[2107],Ek=y[2108],Rk=y[2109],Ok=y[2110],Nk=y[2111],qk=y[2112],Ak=y[2113],Bk=y[2114],Dk=y[2115],Gk=y[2116],Hk=y[2117],Ik=y[2118],Jk=y[2119],Kk=y[2120],Mk=y[2121],Qk=y[2122],Sk=y[2123],Tk=y[2124],Uk=y[2125],Vk=y[2126],Wk=y[2127],Xk=y[2128],Yk=y[2129],Zk=y[2130],$k=y[2131],bL=y[2132],eL=y[2133],cL=y[2134],iL=y[2135],nL=y[2136],lL=y[2137],fL=y[2138],oL=y[2139],dL=y[2140],sL=y[2141],tL=y[2142],uL=y[2143],aL=y[2144],rL=y[2145],hL=y[2146],_L=y[2147],vL=y[2148],PL=y[2149],gL=y[2150],pL=y[2151],wL=y[2152],zL=y[2153],mL=y[2154],yL=y[2155],jL=y[2156],xL=y[2157],CL=y[2158],FL=y[2159],kL=y[2160],LL=y[2161],EL=y[2162],RL=y[2163],OL=y[2164],NL=y[2165],qL=y[2166],AL=y[2167],BL=y[2168],DL=y[2169],GL=y[2170],HL=y[2171],IL=y[2172],JL=y[2173],KL=y[2174],ML=y[2175],QL=y[2176],SL=y[2177],TL=y[2178],UL=y[2179],VL=y[2180],WL=y[2181],XL=y[2182],YL=y[2183],ZL=y[2184],$L=y[2185],bE=y[2186],eE=y[2187],cE=y[2188],iE=y[2189],nE=y[2190],lE=y[2191],fE=y[2192],oE=y[2193],dE=y[2194],sE=y[2195],tE=y[2196],uE=y[2197],aE=y[2198],rE=y[2199],hE=y[2200],_E=y[2201],vE=y[2202],PE=y[2203],gE=y[2204],pE=y[2205],wE=y[2206],zE=y[2207],mE=y[2208],yE=y[220
9],jE=y[2210],xE=y[2211],CE=y[2212],FE=y[2213],kE=y[2214],LE=y[2215],EE=y[2216],RE=y[2217],OE=y[2218],NE=y[2219],qE=y[2220],AE=y[2221],BE=y[2222],DE=y[2223],GE=y[2224],HE=y[2225],IE=y[2226],JE=y[2227],KE=y[2228],ME=y[2229],QE=y[2230],SE=y[2231],TE=y[2232],UE=y[2233],VE=y[2234],WE=y[2235],XE=y[2236],YE=y[2237],ZE=y[2238],$E=y[2239],bR=y[2240],eR=y[2241],cR=y[2242],iR=y[2243],nR=y[2244],lR=y[2245],fR=y[2246],oR=y[2247],dR=y[2248],sR=y[2249],tR=y[2250],uR=y[2251],aR=y[2252],rR=y[2253],hR=y[2254],_R=y[2255],vR=y[2256],PR=y[2257],gR=y[2258],pR=y[2259],wR=y[2260],zR=y[2261],mR=y[2262],yR=y[2263],jR=y[2264],xR=y[2265],CR=y[2266],FR=y[2267],kR=y[2268],LR=y[2269],ER=y[2270],RR=y[2271],OR=y[2272],NR=y[2273],qR=y[2274],AR=y[2275],BR=y[2276],DR=y[2277],GR=y[2278],HR=y[2279],IR=y[2280],JR=y[2281],KR=y[2282],MR=y[2283],QR=y[2284],SR=y[2285],TR=y[2286],UR=y[2287],VR=y[2288],WR=y[2289],XR=y[2290],YR=y[2291],ZR=y[2292],$R=y[2293],bO=y[2294],eO=y[2295],cO=y[2296],iO=y[2297],nO=y[2298],lO=y[2299],fO=y[2300],oO=y[2301],dO=y[2302],sO=y[2303],tO=y[2304],uO=y[2305],aO=y[2306],rO=y[2307],hO=y[2308],_O=y[2309],vO=y[2310],PO=y[2311],gO=y[2312],pO=y[2313],wO=y[2314],zO=y[2315],mO=y[2316],yO=y[2317],jO=y[2318],xO=y[2319],CO=y[2320],FO=y[2321],kO=y[2322],LO=y[2323],EO=y[2324],RO=y[2325],OO=y[2326],NO=y[2327],qO=y[2328],AO=y[2329],BO=y[2330],DO=y[2331],GO=y[2332],HO=y[2333],IO=y[2334],JO=y[2335],KO=y[2336],MO=y[2337],QO=y[2338],SO=y[2339],TO=y[2340],UO=y[2341],VO=y[2342],WO=y[2343],XO=y[2344],YO=y[2345],ZO=y[2346],$O=y[2347],bN=y[2348],eN=y[2349],cN=y[2350],iN=y[2351],nN=y[2352],lN=y[2353],fN=y[2354],oN=y[2355],dN=y[2356],sN=y[2357],tN=y[2358],uN=y[2359],aN=y[2360],rN=y[2361],hN=y[2362],_N=y[2363],vN=y[2364],PN=y[2365],gN=y[2366],pN=y[2367],wN=y[2368],zN=y[2369],mN=y[2370],yN=y[2371],jN=y[2372],xN=y[2373],CN=y[2374],FN=y[2375],kN=y[2376],LN=y[2377],EN=y[2378],RN=y[2379],ON=y[2380],NN=y[2381],qN=y[2382],AN=y[2383],BN=y[2384],DN=y[2385],GN=y[2386],HN=y[2387],IN=y[2388],JN=y[2389],KN=y[2390],MN=y[2
391],QN=y[2392],SN=y[2393],TN=y[2394],UN=y[2395],VN=y[2396],WN=y[2397],XN=y[2398],YN=y[2399],ZN=y[2400],$N=y[2401],bq=y[2402],eq=y[2403],cq=y[2404],iq=y[2405],nq=y[2406],lq=y[2407],fq=y[2408],oq=y[2409],dq=y[2410],sq=y[2411],tq=y[2412],uq=y[2413],aq=y[2414],rq=y[2415],hq=y[2416],_q=y[2417],vq=y[2418],Pq=y[2419],gq=y[2420],pq=y[2421],wq=y[2422],zq=y[2423],mq=y[2424],yq=y[2425],jq=y[2426],xq=y[2427],Cq=y[2428],Fq=y[2429],kq=y[2430],Lq=y[2431],Eq=y[2432],Rq=y[2433],Oq=y[2434],Nq=y[2435],qq=y[2436],Aq=y[2437],Bq=y[2438],Dq=y[2439],Gq=y[2440],Hq=y[2441],Iq=y[2442],Jq=y[2443],Kq=y[2444],Mq=y[2445],Qq=y[2446],Sq=y[2447],Tq=y[2448],Uq=y[2449],Vq=y[2450],Wq=y[2451],Xq=y[2452],Yq=y[2453],Zq=y[2454],$q=y[2455],bA=y[2456],eA=y[2457],cA=y[2458],iA=y[2459],nA=y[2460],lA=y[2461],fA=y[2462],oA=y[2463],dA=y[2464],sA=y[2465],tA=y[2466],uA=y[2467],aA=y[2468],rA=y[2469],hA=y[2470],_A=y[2471],vA=y[2472],PA=y[2473],gA=y[2474],pA=y[2475],wA=y[2476],zA=y[2477],mA=y[2478],yA=y[2479],jA=y[2480],xA=y[2481],CA=y[2482],FA=y[2483],kA=y[2484],LA=y[2485],EA=y[2486],RA=y[2487],OA=y[2488],NA=y[2489],qA=y[2490],AA=y[2491],BA=y[2492],DA=y[2493],GA=y[2494],HA=y[2495],IA=y[2496],JA=y[2497],KA=y[2498],MA=y[2499],QA=y[2500],SA=y[2501],TA=y[2502],UA=y[2503],VA=y[2504],WA=y[2505],XA=y[2506],YA=y[2507],ZA=y[2508],$A=y[2509],bB=y[2510],eB=y[2511],cB=y[2512],iB=y[2513],nB=y[2514],lB=y[2515],fB=y[2516],oB=y[2517],dB=y[2518],sB=y[2519],tB=y[2520],uB=y[2521],aB=y[2522],rB=y[2523],hB=y[2524],_B=y[2525],vB=y[2526],PB=y[2527],gB=y[2528],pB=y[2529],wB=y[2530],zB=y[2531],mB=y[2532],yB=y[2533],jB=y[2534],xB=y[2535],CB=y[2536],FB=y[2537],kB=y[2538],LB=y[2539],EB=y[2540],RB=y[2541],OB=y[2542],NB=y[2543],qB=y[2544],AB=y[2545],BB=y[2546],DB=y[2547],GB=y[2548],HB=y[2549],IB=y[2550],JB=y[2551],KB=y[2552],MB=y[2553],QB=y[2554],SB=y[2555],TB=y[2556],UB=y[2557],VB=y[2558],WB=y[2559],XB=y[2560],YB=y[2561],ZB=y[2562],$B=y[2563],bD=y[2564],eD=y[2565],cD=y[2566],iD=y[2567],nD=y[2568],lD=y[2569],fD=y[2570],oD=y[2571],dD=y[2572],sD=y
[2573],tD=y[2574],uD=y[2575],aD=y[2576],rD=y[2577],hD=y[2578],_D=y[2579],vD=y[2580],PD=y[2581],gD=y[2582],pD=y[2583],wD=y[2584],zD=y[2585],mD=y[2586],yD=y[2587],jD=y[2588],xD=y[2589],CD=y[2590],FD=y[2591],kD=y[2592],LD=y[2593],ED=y[2594],RD=y[2595],OD=y[2596],ND=y[2597],qD=y[2598],AD=y[2599],BD=y[2600],DD=y[2601],GD=y[2602],HD=y[2603],ID=y[2604],JD=y[2605],KD=y[2606],MD=y[2607],QD=y[2608],SD=y[2609],TD=y[2610],UD=y[2611],VD=y[2612],WD=y[2613],XD=y[2614],YD=y[2615],ZD=y[2616],$D=y[2617],bG=y[2618],eG=y[2619],cG=y[2620],iG=y[2621],nG=y[2622],lG=y[2623],fG=y[2624],oG=y[2625],dG=y[2626],sG=y[2627],tG=y[2628],uG=y[2629],aG=y[2630],rG=y[2631],hG=y[2632],_G=y[2633],vG=y[2634],PG=y[2635],gG=y[2636],pG=y[2637],wG=y[2638],zG=y[2639],mG=y[2640],yG=y[2641],jG=y[2642],xG=y[2643],CG=y[2644],FG=y[2645],kG=y[2646],LG=y[2647],EG=y[2648],RG=y[2649],OG=y[2650],NG=y[2651],qG=y[2652],AG=y[2653],BG=y[2654],DG=y[2655],GG=y[2656],HG=y[2657],IG=y[2658],JG=y[2659],KG=y[2660],MG=y[2661],QG=y[2662],SG=y[2663],TG=y[2664],UG=y[2665],VG=y[2666],WG=y[2667],XG=y[2668],YG=y[2669],ZG=y[2670],$G=y[2671],bH=y[2672],eH=y[2673],cH=y[2674],iH=y[2675],nH=y[2676],lH=y[2677],fH=y[2678],oH=y[2679],dH=y[2680],sH=y[2681],tH=y[2682],uH=y[2683],aH=y[2684],rH=y[2685],hH=y[2686],_H=y[2687],vH=y[2688],PH=y[2689],gH=y[2690],pH=y[2691],wH=y[2692],zH=y[2693],mH=y[2694],yH=y[2695],jH=y[2696],xH=y[2697],CH=y[2698],FH=y[2699],kH=y[2700],LH=y[2701],EH=y[2702],RH=y[2703],OH=y[2704],NH=y[2705],qH=y[2706],AH=y[2707],BH=y[2708],DH=y[2709],GH=y[2710],HH=y[2711],IH=y[2712],JH=y[2713],KH=y[2714],MH=y[2715],QH=y[2716],SH=y[2717],TH=y[2718],UH=y[2719],VH=y[2720],WH=y[2721],XH=y[2722],YH=y[2723],ZH=y[2724],$H=y[2725],bI=y[2726],eI=y[2727],cI=y[2728],iI=y[2729],nI=y[2730],lI=y[2731],fI=y[2732],oI=y[2733],dI=y[2734],sI=y[2735],tI=y[2736],uI=y[2737],aI=y[2738],rI=y[2739],hI=y[2740],_I=y[2741],vI=y[2742],PI=y[2743],gI=y[2744],pI=y[2745],wI=y[2746],zI=y[2747],mI=y[2748],yI=y[2749],jI=y[2750],xI=y[2751],CI=y[2752],FI=y[2753],kI=y[2754],LI
=y[2755],EI=y[2756],RI=y[2757],OI=y[2758],NI=y[2759],qI=y[2760],AI=y[2761],BI=y[2762],DI=y[2763],GI=y[2764],HI=y[2765],II=y[2766],JI=y[2767],KI=y[2768],MI=y[2769],QI=y[2770],SI=y[2771],TI=y[2772],UI=y[2773],VI=y[2774],WI=y[2775],XI=y[2776],YI=y[2777],ZI=y[2778],$I=y[2779],bJ=y[2780],eJ=y[2781],cJ=y[2782],iJ=y[2783],nJ=y[2784],lJ=y[2785],fJ=y[2786],oJ=y[2787],dJ=y[2788],sJ=y[2789],tJ=y[2790],uJ=y[2791],aJ=y[2792],rJ=y[2793],hJ=y[2794],_J=y[2795],vJ=y[2796],PJ=y[2797],gJ=y[2798],pJ=y[2799],wJ=y[2800],zJ=y[2801],mJ=y[2802],yJ=y[2803],jJ=y[2804],xJ=y[2805],CJ=y[2806],FJ=y[2807],kJ=y[2808],LJ=y[2809],EJ=y[2810],RJ=y[2811],OJ=y[2812],NJ=y[2813],qJ=y[2814],AJ=y[2815],BJ=y[2816],DJ=y[2817],GJ=y[2818],HJ=y[2819],IJ=y[2820],JJ=y[2821],KJ=y[2822],MJ=y[2823],QJ=y[2824],SJ=y[2825],TJ=y[2826],UJ=y[2827],VJ=y[2828],WJ=y[2829],XJ=y[2830],YJ=y[2831],ZJ=y[2832],$J=y[2833],bK=y[2834],eK=y[2835],cK=y[2836],iK=y[2837],nK=y[2838],lK=y[2839],fK=y[2840],oK=y[2841],dK=y[2842],sK=y[2843],tK=y[2844],uK=y[2845],aK=y[2846],rK=y[2847],hK=y[2848],_K=y[2849],vK=y[2850],PK=y[2851],gK=y[2852],pK=y[2853],wK=y[2854],zK=y[2855],mK=y[2856],yK=y[2857],jK=y[2858],xK=y[2859],CK=y[2860],FK=y[2861],kK=y[2862],LK=y[2863],EK=y[2864],RK=y[2865],OK=y[2866],NK=y[2867],qK=y[2868],AK=y[2869],BK=y[2870],DK=y[2871],GK=y[2872],HK=y[2873],IK=y[2874],JK=y[2875],KK=y[2876],MK=y[2877],QK=y[2878],SK=y[2879],TK=y[2880],UK=y[2881],VK=y[2882],WK=y[2883],XK=y[2884],YK=y[2885],ZK=y[2886],$K=y[2887],bM=y[2888],eM=y[2889],cM=y[2890],iM=y[2891],nM=y[2892],lM=y[2893],fM=y[2894],oM=y[2895],dM=y[2896],sM=y[2897],tM=y[2898],uM=y[2899],aM=y[2900],rM=y[2901],hM=y[2902],_M=y[2903],vM=y[2904],PM=y[2905],gM=y[2906],pM=y[2907],wM=y[2908],zM=y[2909],mM=y[2910],yM=y[2911],jM=y[2912],xM=y[2913],CM=y[2914],FM=y[2915],kM=y[2916],LM=y[2917],EM=y[2918],RM=y[2919],OM=y[2920],NM=y[2921],qM=y[2922],AM=y[2923],BM=y[2924],DM=y[2925],GM=y[2926],HM=y[2927],IM=y[2928],JM=y[2929],KM=y[2930],MM=y[2931],QM=y[2932],SM=y[2933],TM=y[2934],UM=y[2935],VM=y[2936],
WM=y[2937],XM=y[2938],YM=y[2939],ZM=y[2940],$M=y[2941],bQ=y[2942],eQ=y[2943],cQ=y[2944],iQ=y[2945],nQ=y[2946],lQ=y[2947],fQ=y[2948],oQ=y[2949],dQ=y[2950],sQ=y[2951],tQ=y[2952],uQ=y[2953],aQ=y[2954],rQ=y[2955],hQ=y[2956],_Q=y[2957],vQ=y[2958],PQ=y[2959],gQ=y[2960],pQ=y[2961],wQ=y[2962],zQ=y[2963],mQ=y[2964],yQ=y[2965],jQ=y[2966],xQ=y[2967],CQ=y[2968],FQ=y[2969],kQ=y[2970],LQ=y[2971],EQ=y[2972],RQ=y[2973],OQ=y[2974],NQ=y[2975],qQ=y[2976],AQ=y[2977],BQ=y[2978],DQ=y[2979],GQ=y[2980],HQ=y[2981],IQ=y[2982],JQ=y[2983],KQ=y[2984],MQ=y[2985],QQ=y[2986],SQ=y[2987],TQ=y[2988],UQ=y[2989],VQ=y[2990],WQ=y[2991],XQ=y[2992],YQ=y[2993],ZQ=y[2994],$Q=y[2995],bS=y[2996],eS=y[2997],cS=y[2998],iS=y[2999],nS=y[3e3],lS=y[3001],fS=y[3002],oS=y[3003],dS=y[3004],sS=y[3005],tS=y[3006],uS=y[3007],aS=y[3008],rS=y[3009],hS=y[3010],_S=y[3011],vS=y[3012],PS=y[3013],gS=y[3014],pS=y[3015],wS=y[3016],zS=y[3017],mS=y[3018],yS=y[3019],jS=y[3020],xS=y[3021],CS=y[3022],FS=y[3023],kS=y[3024],LS=y[3025],ES=y[3026],RS=y[3027],OS=y[3028],NS=y[3029],qS=y[3030],AS=y[3031],BS=y[3032],DS=y[3033],GS=y[3034],HS=y[3035],IS=y[3036],JS=y[3037],KS=y[3038],MS=y[3039],QS=y[3040],SS=y[3041],TS=y[3042],US=y[3043],VS=y[3044],WS=y[3045],XS=y[3046],YS=y[3047],ZS=y[3048],$S=y[3049],bT=y[3050],eT=y[3051],cT=y[3052],iT=y[3053],nT=y[3054],lT=y[3055],fT=y[3056],oT=y[3057],dT=y[3058],sT=y[3059],tT=y[3060],uT=y[3061],aT=y[3062],rT=y[3063],hT=y[3064],_T=y[3065],vT=y[3066],PT=y[3067],gT=y[3068],pT=y[3069],wT=y[3070],zT=y[3071],mT=y[3072],yT=y[3073],jT=y[3074],xT=y[3075],CT=y[3076],FT=y[3077],kT=y[3078],LT=y[3079],ET=y[3080],RT=y[3081],OT=y[3082],NT=y[3083],qT=y[3084],AT=y[3085],BT=y[3086],DT=y[3087],GT=y[3088],HT=y[3089],IT=y[3090],JT=y[3091],KT=y[3092],MT=y[3093],QT=y[3094],ST=y[3095],TT=y[3096],UT=y[3097],VT=y[3098],WT=y[3099],XT=y[3100],YT=y[3101],ZT=y[3102],$T=y[3103],bU=y[3104],eU=y[3105],cU=y[3106],iU=y[3107],nU=y[3108],lU=y[3109],fU=y[3110],oU=y[3111],dU=y[3112],sU=y[3113],tU=y[3114],uU=y[3115],aU=y[3116],rU=y[3117],hU=y[3118]
,_U=y[3119],vU=y[3120],PU=y[3121],gU=y[3122],pU=y[3123],wU=y[3124],zU=y[3125],mU=y[3126],yU=y[3127],jU=y[3128],xU=y[3129],CU=y[3130],FU=y[3131],kU=y[3132],LU=y[3133],EU=y[3134],RU=y[3135],OU=y[3136],NU=y[3137],qU=y[3138],AU=y[3139],BU=y[3140],DU=y[3141],GU=y[3142],HU=y[3143],IU=y[3144],JU=y[3145],KU=y[3146],MU=y[3147],QU=y[3148],SU=y[3149],TU=y[3150],UU=y[3151],VU=y[3152],WU=y[3153],XU=y[3154],YU=y[3155],ZU=y[3156],$U=y[3157],bV=y[3158],eV=y[3159],cV=y[3160],iV=y[3161],nV=y[3162],lV=y[3163],fV=y[3164],oV=y[3165],dV=y[3166],sV=y[3167],tV=y[3168],uV=y[3169],aV=y[3170],rV=y[3171],hV=y[3172],_V=y[3173],vV=y[3174],PV=y[3175],gV=y[3176],pV=y[3177],wV=y[3178],zV=y[3179],mV=y[3180],yV=y[3181],jV=y[3182],xV=y[3183],CV=y[3184],FV=y[3185],kV=y[3186],LV=y[3187],EV=y[3188],RV=y[3189],OV=y[3190],NV=y[3191],qV=y[3192],AV=y[3193],BV=y[3194],DV=y[3195],GV=y[3196],HV=y[3197],IV=y[3198],JV=y[3199],KV=y[3200],MV=y[3201],QV=y[3202],SV=y[3203],TV=y[3204],UV=y[3205],VV=y[3206],WV=y[3207],XV=y[3208],YV=y[3209],ZV=y[3210],$V=y[3211],bW=y[3212],eW=y[3213],cW=y[3214],iW=y[3215],nW=y[3216],lW=y[3217],fW=y[3218],oW=y[3219],dW=y[3220],sW=y[3221],tW=y[3222],uW=y[3223],aW=y[3224],rW=y[3225],hW=y[3226],_W=y[3227],vW=y[3228],PW=y[3229],gW=y[3230],pW=y[3231],wW=y[3232],zW=y[3233],mW=y[3234],yW=y[3235],jW=y[3236],xW=y[3237],CW=y[3238],FW=y[3239],kW=y[3240],LW=y[3241],EW=y[3242],RW=y[3243],OW=y[3244],NW=y[3245],qW=y[3246],AW=y[3247],BW=y[3248],DW=y[3249],GW=y[3250],HW=y[3251],IW=y[3252],JW=y[3253],KW=y[3254],MW=y[3255],QW=y[3256],SW=y[3257],TW=y[3258],UW=y[3259],VW=y[3260],WW=y[3261],XW=y[3262],YW=y[3263],ZW=y[3264],$W=y[3265],bX=y[3266],eX=y[3267],cX=y[3268],iX=y[3269],nX=y[3270],lX=y[3271],fX=y[3272],oX=y[3273],dX=y[3274],sX=y[3275],tX=y[3276],uX=y[3277],aX=y[3278],rX=y[3279],hX=y[3280],_X=y[3281],vX=y[3282],PX=y[3283],gX=y[3284],pX=y[3285],wX=y[3286],zX=y[3287],mX=y[3288],yX=y[3289],jX=y[3290],xX=y[3291],CX=y[3292],FX=y[3293],kX=y[3294],LX=y[3295],EX=y[3296],RX=y[3297],OX=y[3298],NX=y[3299],qX=y[330
0],AX=y[3301],BX=y[3302],DX=y[3303],GX=y[3304],HX=y[3305],IX=y[3306],JX=y[3307],KX=y[3308],MX=y[3309],QX=y[3310],SX=y[3311],TX=y[3312],UX=y[3313],VX=y[3314],WX=y[3315],XX=y[3316],YX=y[3317],ZX=y[3318],$X=y[3319],bY=y[3320],eY=y[3321],cY=y[3322],iY=y[3323],nY=y[3324],lY=y[3325],fY=y[3326],oY=y[3327],dY=y[3328],sY=y[3329],tY=y[3330],uY=y[3331],aY=y[3332],rY=y[3333],hY=y[3334],_Y=y[3335],vY=y[3336],PY=y[3337],gY=y[3338],pY=y[3339],wY=y[3340],zY=y[3341],mY=y[3342],yY=y[3343],jY=y[3344],xY=y[3345],CY=y[3346],FY=y[3347],kY=y[3348],LY=y[3349],EY=y[3350],RY=y[3351],OY=y[3352],NY=y[3353],qY=y[3354],AY=y[3355],BY=y[3356],DY=y[3357],GY=y[3358],HY=y[3359],IY=y[3360],JY=y[3361],KY=y[3362],MY=y[3363],QY=y[3364],SY=y[3365],TY=y[3366],UY=y[3367],VY=y[3368],WY=y[3369],XY=y[3370],YY=y[3371],ZY=y[3372],$Y=y[3373],bZ=y[3374],eZ=y[3375],cZ=y[3376],iZ=y[3377],nZ=y[3378],lZ=y[3379],fZ=y[3380],oZ=y[3381],dZ=y[3382],sZ=y[3383],tZ=y[3384],uZ=y[3385],aZ=y[3386],rZ=y[3387],hZ=y[3388],_Z=y[3389],vZ=y[3390],PZ=y[3391],gZ=y[3392],pZ=y[3393],wZ=y[3394],zZ=y[3395],mZ=y[3396],yZ=y[3397],jZ=y[3398],xZ=y[3399],CZ=y[3400],FZ=y[3401],kZ=y[3402],LZ=y[3403],EZ=y[3404],RZ=y[3405],OZ=y[3406],NZ=y[3407],qZ=y[3408],AZ=y[3409],BZ=y[3410],DZ=y[3411],GZ=y[3412],HZ=y[3413],IZ=y[3414],JZ=y[3415],KZ=y[3416],MZ=y[3417],QZ=y[3418],SZ=y[3419],TZ=y[3420],UZ=y[3421],VZ=y[3422],WZ=y[3423],XZ=y[3424],YZ=y[3425],ZZ=y[3426],$Z=y[3427],b$=y[3428],e$=y[3429],c$=y[3430],i$=y[3431],n$=y[3432],l$=y[3433],f$=y[3434],o$=y[3435],d$=y[3436],s$=y[3437],t$=y[3438],u$=y[3439],a$=y[3440],r$=y[3441],h$=y[3442],_$=y[3443],v$=y[3444],P$=y[3445],g$=y[3446],p$=y[3447],w$=y[3448],z$=y[3449],m$=y[3450],y$=y[3451],j$=y[3452],x$=y[3453],C$=y[3454],F$=y[3455],k$=y[3456],L$=y[3457],E$=y[3458],R$=y[3459],O$=y[3460],N$=y[3461],q$=y[3462],A$=y[3463],B$=y[3464],D$=y[3465],G$=y[3466],H$=y[3467],I$=y[3468],J$=y[3469],K$=y[3470],M$=y[3471],Q$=y[3472],S$=y[3473],T$=y[3474],U$=y[3475],V$=y[3476],W$=y[3477],X$=y[3478],Y$=y[3479],Z$=y[3480],$$=y[3481],b11=y[
3482],e11=y[3483],c11=y[3484],i11=y[3485],n11=y[3486],l11=y[3487],f11=y[3488],o11=y[3489],d11=y[3490],s11=y[3491],t11=y[3492],u11=y[3493],a11=y[3494],r11=y[3495],h11=y[3496],_11=y[3497],v11=y[3498],P11=y[3499],g11=y[3500],p11=y[3501],w11=y[3502],z11=y[3503],m11=y[3504],y11=y[3505],j11=y[3506],x11=y[3507],C11=y[3508],F11=y[3509],k11=y[3510],L11=y[3511],E11=y[3512],R11=y[3513],O11=y[3514],N11=y[3515],q11=y[3516],A11=y[3517],B11=y[3518],D11=y[3519],G11=y[3520],H11=y[3521],I11=y[3522],J11=y[3523],K11=y[3524],M11=y[3525],Q11=y[3526],S11=y[3527],T11=y[3528],U11=y[3529],V11=y[3530],W11=y[3531],X11=y[3532],Y11=y[3533],Z11=y[3534],$11=y[3535],bb1=y[3536],eb1=y[3537],cb1=y[3538],ib1=y[3539],nb1=y[3540],lb1=y[3541],fb1=y[3542],ob1=y[3543],db1=y[3544],sb1=y[3545],tb1=y[3546],ub1=y[3547],ab1=y[3548],rb1=y[3549],hb1=y[3550],_b1=y[3551],vb1=y[3552],Pb1=y[3553],gb1=y[3554],pb1=y[3555],wb1=y[3556],zb1=y[3557],mb1=y[3558],yb1=y[3559],jb1=y[3560],xb1=y[3561],Cb1=y[3562],Fb1=y[3563],kb1=y[3564],Lb1=y[3565],Eb1=y[3566],Rb1=y[3567],Ob1=y[3568],Nb1=y[3569],qb1=y[3570],Ab1=y[3571],Bb1=y[3572],Db1=y[3573],Gb1=y[3574],Hb1=y[3575],Ib1=y[3576],Jb1=y[3577],Kb1=y[3578],Mb1=y[3579],Qb1=y[3580],Sb1=y[3581],Tb1=y[3582],Ub1=y[3583],Vb1=y[3584],Wb1=y[3585],Xb1=y[3586],Yb1=y[3587],Zb1=y[3588],$b1=y[3589],be1=y[3590],ee1=y[3591],ce1=y[3592],ie1=y[3593],ne1=y[3594],le1=y[3595],fe1=y[3596],oe1=y[3597],de1=y[3598],se1=y[3599],te1=y[3600],ue1=y[3601],ae1=y[3602],re1=y[3603],he1=y[3604],_e1=y[3605],ve1=y[3606],Pe1=y[3607],ge1=y[3608],pe1=y[3609],we1=y[3610],ze1=y[3611],me1=y[3612],ye1=y[3613],je1=y[3614],xe1=y[3615],Ce1=y[3616],Fe1=y[3617],ke1=y[3618],Le1=y[3619],Ee1=y[3620],Re1=y[3621],Oe1=y[3622],Ne1=y[3623],qe1=y[3624],Ae1=y[3625],Be1=y[3626],De1=y[3627],Ge1=y[3628],He1=y[3629],Ie1=y[3630],Je1=y[3631],Ke1=y[3632],Me1=y[3633],Qe1=y[3634],Se1=y[3635],Te1=y[3636],Ue1=y[3637],Ve1=y[3638],We1=y[3639],Xe1=y[3640],Ye1=y[3641],Ze1=y[3642],$e1=y[3643],b21=y[3644],e21=y[3645],c21=y[3646],i21=y[3647],n21=y[3648],l2
1=y[3649],f21=y[3650],o21=y[3651],d21=y[3652],s21=y[3653],t21=y[3654],u21=y[3655],a21=y[3656],r21=y[3657],h21=y[3658],_21=y[3659],v21=y[3660],P21=y[3661],g21=y[3662],p21=y[3663],w21=y[3664],z21=y[3665],m21=y[3666],y21=y[3667],j21=y[3668],x21=y[3669],C21=y[3670],F21=y[3671],k21=y[3672],L21=y[3673],E21=y[3674],R21=y[3675],O21=y[3676],N21=y[3677],q21=y[3678],A21=y[3679],B21=y[3680],D21=y[3681],G21=y[3682],H21=y[3683],I21=y[3684],J21=y[3685],K21=y[3686],M21=y[3687],Q21=y[3688],S21=y[3689],T21=y[3690],U21=y[3691],V21=y[3692],W21=y[3693],X21=y[3694],Y21=y[3695],Z21=y[3696],$21=y[3697],b31=y[3698],e31=y[3699],c31=y[3700],i31=y[3701],n31=y[3702],l31=y[3703],f31=y[3704],o31=y[3705],d31=y[3706],s31=y[3707],t31=y[3708],u31=y[3709],a31=y[3710],r31=y[3711],h31=y[3712],_31=y[3713],v31=y[3714],P31=y[3715],g31=y[3716],p31=y[3717],w31=y[3718],z31=y[3719],m31=y[3720],y31=y[3721],j31=y[3722],x31=y[3723],C31=y[3724],F31=y[3725],k31=y[3726],L31=y[3727],E31=y[3728],R31=y[3729],O31=y[3730],N31=y[3731],q31=y[3732],A31=y[3733],B31=y[3734],D31=y[3735],G31=y[3736],H31=y[3737],I31=y[3738],J31=y[3739],K31=y[3740],M31=y[3741],Q31=y[3742],S31=y[3743],T31=y[3744],U31=y[3745],V31=y[3746],W31=y[3747],X31=y[3748],Y31=y[3749],Z31=y[3750],$31=y[3751],bc1=y[3752],ec1=y[3753],cc1=y[3754],ic1=y[3755],nc1=y[3756],lc1=y[3757],fc1=y[3758],oc1=y[3759],dc1=y[3760],sc1=y[3761],tc1=y[3762],uc1=y[3763],ac1=y[3764],rc1=y[3765],hc1=y[3766],_c1=y[3767],vc1=y[3768],Pc1=y[3769],gc1=y[3770],pc1=y[3771],wc1=y[3772],zc1=y[3773],mc1=y[3774],yc1=y[3775],jc1=y[3776],xc1=y[3777],Cc1=y[3778],Fc1=y[3779],kc1=y[3780],Lc1=y[3781],Ec1=y[3782],Rc1=y[3783],Oc1=y[3784],Nc1=y[3785],qc1=y[3786],Ac1=y[3787],Bc1=y[3788],Dc1=y[3789],Gc1=y[3790],Hc1=y[3791],Ic1=y[3792],Jc1=y[3793],Kc1=y[3794],Mc1=y[3795],Qc1=y[3796],Sc1=y[3797],Tc1=y[3798],Uc1=y[3799],Vc1=y[3800],Wc1=y[3801],Xc1=y[3802],Yc1=y[3803],Zc1=y[3804],$c1=y[3805],bi1=y[3806],ei1=y[3807],ci1=y[3808],ii1=y[3809],ni1=y[3810],li1=y[3811],fi1=y[3812],oi1=y[3813],di1=y[3814],si1=y[3815
],ti1=y[3816],ui1=y[3817],ai1=y[3818],ri1=y[3819],hi1=y[3820],_i1=y[3821],vi1=y[3822],Pi1=y[3823],gi1=y[3824],pi1=y[3825],wi1=y[3826],zi1=y[3827],mi1=y[3828],yi1=y[3829],ji1=y[3830],xi1=y[3831],Ci1=y[3832],Fi1=y[3833],ki1=y[3834],Li1=y[3835],Ei1=y[3836],Ri1=y[3837],Oi1=y[3838],Ni1=y[3839],qi1=y[3840],Ai1=y[3841],Bi1=y[3842],Di1=y[3843],Gi1=y[3844],Hi1=y[3845],Ii1=y[3846],Ji1=y[3847],Ki1=y[3848],Mi1=y[3849],Qi1=y[3850],Si1=y[3851],Ti1=y[3852],Ui1=y[3853],Vi1=y[3854],Wi1=y[3855],Xi1=y[3856],Yi1=y[3857],Zi1=y[3858],$i1=y[3859],bn1=y[3860],en1=y[3861],cn1=y[3862],in1=y[3863],nn1=y[3864],ln1=y[3865],fn1=y[3866],on1=y[3867],dn1=y[3868],sn1=y[3869],tn1=y[3870],un1=y[3871],an1=y[3872],rn1=y[3873],hn1=y[3874],_n1=y[3875],vn1=y[3876],Pn1=y[3877],gn1=y[3878],pn1=y[3879],wn1=y[3880],zn1=y[3881],mn1=y[3882],yn1=y[3883],jn1=y[3884],xn1=y[3885],Cn1=y[3886],Fn1=y[3887],kn1=y[3888],Ln1=y[3889],En1=y[3890],Rn1=y[3891],On1=y[3892],Nn1=y[3893],qn1=y[3894],An1=y[3895],Bn1=y[3896],Dn1=y[3897],Gn1=y[3898],Hn1=y[3899],In1=y[3900],Jn1=y[3901],Kn1=y[3902],Mn1=y[3903],Qn1=y[3904],Sn1=y[3905],Tn1=y[3906],Un1=y[3907],Vn1=y[3908],Wn1=y[3909],Xn1=y[3910],Yn1=y[3911],Zn1=y[3912],$n1=y[3913],b01=y[3914],e01=y[3915],c01=y[3916],i01=y[3917],n01=y[3918],l01=y[3919],f01=y[3920],o01=y[3921],d01=y[3922],s01=y[3923],t01=y[3924],u01=y[3925],a01=y[3926],r01=y[3927],h01=y[3928],_01=y[3929],v01=y[3930],P01=y[3931],g01=y[3932],p01=y[3933];while(!![]){let 
b=n[c++];if([null,99732,99392,99354,99334,99305,99298,99252,99229,99214,99211,99203,99178,99048,99029,98963,98935,98908,98899,98860,98791,98730,98663,98614,98593,98404,98393,98268,98208,98177,97984,97890,97889,97703,97647,97488,97341,97279,97177,97100,97096,97083,96995,96968,96953,96924,96858,96829,96814,96796,96715,96622,96614,96592,96544,96465,96381,96357,96320,96287,95956,95882,95871,95858,95837,95828,95822,95785,95772,95746,95535,95521,95520,95432,95340,95297,95280,95273,95129,95046,94936,94925,94818,94761,94717,94706,94628,94627,94608,94604,94600,94567,94546,94438,94423,94254,94249,94216,94200,94138,94132,94112,94096,94089,94029,94027,94014,93982,93868,93853,93808,93794,93763,93678,93666,93515,93492,93443,93417,93400,93379,93313,93312,93259,93214,93167,93046,92911,92819,92769,92678,92620,92557,92519,92360,92317,92258,92200,92102,92077,92032,91938,91929,91866,91767,91629,91609,91601,91577,91512,91397,91286,91239,91237,91212,91204,91095,91086,91084,90563,90538,90444,90441,90428,90327,90317,90270,90231,90178,90138,90125,90046,90037,90026,90020,89987,89939,89822,89802,89700,89637,89554,89463,89349,89247,89209,89187,89156,89146,89016,89013,88972,88925,88915,88884,88868,88826,88610,88563,88479,88459,88450,88444,88411,88381,88373,88336,88288,88273,88224,88201,88184,88067,88012,87984,87849,87846,87842,87829,87821,87757,87670,87644,87614,87580,87434,87419,87365,87329,87261,87164,86955,86908,86887,86673,86555,86487,86431,86327,86268,86265,86251,86147,86081,86056,85936,85901,85883,85874,85833,85359,85345,85301,85281,85268,85212,85162,85140,85117,85059,85046,85038,85019,84976,84965,84862,84671,84646,84633,84629,84586,84554,84541,84514,84510,84483,84455,84410,84397,84339,84329,84224,84144,84079,84024,84006,83987,83898,83795,83748,83735,83553,83510,83504,83492,83423,83411,83395,83393,83379,83361,83194,83184,83130,83124,83080,83078,82996,82995,82888,82841,82781,82719,82665,82625,82576,82535,82530,82420,82327,82255,82114,82109,82035,82021,82020,81798,81661,81519,81518,81474,81
416,81405,81319,81278,81260,81230,81082,81057,81014,80906,80867,80809,80804,80761,80749,80701,80675,80664,80637,80580,80525,80521,80481,80337,80326,80323,80271,80258,80186,80119,80101,79881,79833,79807,79723,79408,79343,79284,79228,79184,79177,79082,79078,78947,78943,78894,78762,78756,78729,78668,78651,78645,78496,78465,78298,78285,78161,78147,77952,77877,77854,77832,77822,77820,77741,77643,77503,77409,77384,77309,77308,77298,77258,77208,77198,77114,77048,76974,76919,76882,76847,76717,76689,76626,76524,76386,76328,76267,76233,76200,76173,76171,76147,76105,76080,75987,75973,75945,75905,75883,75779,75716,75694,75658,75646,75588,75538,75506,75421,75384,75348,75294,75260,75251,75250,75243,75137,75060,74768,74727,74708,74692,74691,74677,74372,74261,74239,74025,74008,73992,73901,73857,73809,73805,73795,73750,73684,73635,73541,73526,73437,73419,73334,73230,73199,73195,73185,73183,73085,73022,72973,72909,72847,72832,72818,72746,72708,72704,72490,72483,72467,72379,72297,72284,72233,72209,72206,72152,72111,72037,72036,71859,71840,71764,71660,71654,71528,71511,71483,71461,71276,71220,70977,70962,70951,70931,70911,70909,70846,70798,70781,70762,70753,70691,70535,70515,70288,70262,70202,70170,70143,70138,70088,70048,70022,70016,69995,69993,69986,69957,69949,69917,69840,69832,69780,69772,69761,69698,69635,69626,69285,69277,69270,69245,69188,69177,69160,69093,69076,68956,68935,68810,68800,68778,68728,68688,68577,68566,68527,68474,68464,68328,68147,68130,68099,68094,68026,67996,67990,67865,67796,67773,67677,67648,67609,67474,67381,67357,67346,67318,67277,67243,67228,67156,66944,66930,66929,66804,66766,66753,66671,66598,66581,66564,66515,66431,66395,66314,66300,66292,66233,66132,66105,66064,66052,65921,65770,65714,65618,65570,65552,65527,65516,65504,65337,65289,65229,65163,65132,65102,65077,65027,65025,65005,64989,64977,64971,64869,64798,64749,64586,64459,64383,64341,64311,64296,64235,64215,64131,64111,64042,63880,63867,63798,63774,63620,63566,63535,63499,63465,63439,63343,63327,6330
1,63243,63227,63123,63037,62826,62792,62778,62722,62716,62678,62612,62564,62526,62522,62398,62345,62260,62246,62229,62166,62133,62025,61970,61966,61958,61859,61845,61814,61765,61700,61699,61611,61602,61532,61474,61315,61306,61287,61283,61224,61138,61071,61057,60973,60960,60956,60822,60632,60594,60575,60558,60440,60432,60292,60290,60207,60162,60132,59934,59918,59894,59849,59768,59730,59645,59635,59623,59567,59499,59454,59439,59421,59339,59279,59249,59185,59174,59035,59023,59001,58821,58748,58667,58658,58649,58581,58577,58464,58428,58380,58358,58245,58230,58218,58131,58107,58103,58027,57989,57954,57947,57840,57814,57779,57761,57757,57654,57518,57508,57494,57447,57370,57347,57239,57205,57159,57115,57056,56955,56904,56889,56874,56838,56621,56613,56577,56503,56413,56387,56383,56346,56152,56122,56070,56065,55817,55816,55801,55644,55600,55596,55527,55467,55377,55309,55299,55295,55275,55252,55206,55137,55115,55060,55016,54991,54965,54875,54871,54837,54836,54831,54819,54796,54695,54681,54645,54623,54502,54456,54425,54376,54321,54308,54269,54220,54123,54091,54059,53792,53772,53689,53682,53594,53562,53454,53449,53391,53371,53357,53349,53346,53330,53265,53246,53224,53148,53141,53103,53095,53041,52966,52893,52872,52838,52735,52676,52568,52473,52424,52416,52400,52326,52241,52230,52221,52207,52181,51990,51887,51789,51761,51724,51664,51630,51553,51517,51506,51347,51313,51271,51229,51223,51123,51120,51011,50991,50932,50873,50820,50771,50632,50614,50550,50547,50531,50445,50350,50281,50256,50187,49816,49779,49739,49694,49685,49677,49647,49524,49413,49395,49388,49367,49225,49128,49017,48987,48932,48902,48875,48847,48783,48734,48702,48590,48515,48370,48351,48343,48280,48275,48187,48165,48124,48111,48105,48041,48037,47865,47796,47649,47640,47633,47430,47411,47364,47344,47337,47322,47314,47230,47225,47149,47093,47082,47067,47060,47058,47057,47044,47011,46878,46764,46670,46657,46493,46491,46457,46383,46357,46295,46256,46204,46195,46011,45944,45890,45841,45824,45817,45684,45671,45655,45633,
45616,45602,45592,45474,45434,45431,45250,45187,45168,45144,45048,45038,44973,44941,44892,44720,44693,44668,44484,44375,44297,44188,44141,44080,44052,44040,44007,43962,43854,43843,43815,43801,43798,43681,43544,43468,43415,43368,43304,43124,43068,43050,43005,42824,42762,42689,42671,42645,42633,42606,42577,42568,42495,42396,42374,42242,42108,42074,42010,41907,41792,41756,41752,41612,41599,41525,41451,41431,41404,41388,41214,41131,41012,40971,40932,40887,40831,40808,40690,40639,40552,40481,40433,40404,40237,40233,40220,40218,40190,40118,40032,40003,39925,39866,39765,39754,39752,39675,39582,39437,39257,39234,39097,39054,39040,39005,38972,38952,38902,38856,38793,38770,38733,38727,38685,38673,38640,38637,38552,38495,38225,38218,38206,38135,38088,37972,37809,37760,37759,37633,37570,37551,37530,37509,37483,37427,37251,37091,37062,36968,36951,36827,36786,36780,36753,36706,36697,36669,36635,36531,36371,36370,36368,36325,36306,36266,36209,36038,36033,35943,35931,35853,35683,35546,35487,35463,35448,35445,35321,35221,35190,35150,35136,35085,34999,34917,34728,34672,34654,34641,34632,34534,34483,34421,34375,34291,34253,34175,34160,34111,33999,33975,33964,33955,33905,33808,33781,33726,33621,33508,33502,33485,33448,33432,33424,33355,33305,33287,33275,33175,33097,33064,33038,33025,32975,32918,32898,32877,32869,32847,32830,32826,32802,32796,32748,32636,32495,32387,32351,32238,32212,32120,32077,32039,32010,31993,31911,31900,31854,31674,31663,31647,31630,31625,31612,31570,31502,31492,31487,31464,31391,31285,31109,31073,31046,30959,30917,30888,30810,30752,30726,30686,30636,30616,30603,30513,30291,30270,30249,30083,30079,30055,30041,30039,3e4,29996,29791,29765,29708,29616,29607,29587,29508,29451,29439,29399,29378,29244,29238,29157,29121,29109,29099,29098,29094,29076,29018,29008,29007,28886,28865,28860,28797,28793,28686,28674,28641,28633,28621,28522,28424,28408,28367,28334,28331,28278,28270,28245,28091,28075,28073,28064,28051,28025,27904,27883,27873,27685,27642,27632,27598,27553,27271,2715
1,27148,27108,27075,26984,26966,26942,26928,26894,26863,26861,26816,26797,26755,26676,26563,26525,26424,26305,26219,26076,25982,25974,25935,25896,25863,25830,25770,25738,25724,25687,25674,25604,25600,25577,25576,25544,25537,25468,25390,25385,25251,25174,25169,25166,25092,25089,25037,25012,24824,24772,24770,24733,24569,24435,24398,24376,24321,24270,24225,24197,24133,24100,24089,24029,23974,23960,23939,23810,23772,23672,23487,23452,23309,23229,23220,23210,23199,23177,23127,23100,23032,22878,22855,22854,22816,22779,22742,22720,22646,22562,22532,22387,22376,22292,22175,22028,21998,21873,21861,21804,21773,21686,21621,21619,21610,21510,21505,21351,21348,21087,21056,21017,21009,20974,20836,20738,20642,20523,20501,20499,20487,20327,20323,20286,20275,20237,20085,20026,19969,19863,19857,19846,19807,19785,19691,19673,19650,19638,19631,19606,19563,19531,19498,19297,19272,19227,19222,19104,19100,19073,19010,18989,18975,18940,18760,18722,18684,18536,18508,18500,18436,18373,18303,18293,18171,18166,18105,18046,18037,17990,17962,17807,17787,17594,17575,17538,17515,17364,17342,17335,17235,17213,17153,17142,17119,17096,17071,16963,16950,16890,16798,16712,16651,16641,16630,16579,16555,16534,16196,16157,16128,16118,16074,16045,15879,15863,15834,15619,15584,15554,15473,15399,15367,15359,15311,15221,15135,15121,15085,15056,14996,14930,14893,14873,14838,14777,14567,14558,14543,14510,14440,14305,14259,14252,14191,14061,13876,13845,13839,13799,13768,13618,13505,13472,13375,13314,13308,13043,13020,12998,12958,12934,12908,12863,12846,12798,12793,12791,12751,12652,12648,12634,12583,12571,12528,12479,12307,12295,12195,12177,12174,12123,11850,11847,11822,11736,11679,11671,11662,11553,11533,11481,11396,11393,11376,11373,11322,11288,11249,11233,11225,11202,11163,11114,11074,11007,10993,10934,10918,10848,10815,10797,10780,10707,10688,10684,10682,10680,10580,10510,10478,10454,10425,10400,10397,10390,10385,10382,10355,10238,10189,10081,9882,9755,9690,9609,9608,9603,9598,9560,9431,9380,9366,9342,9185,9
006,8964,8959,8954,8851,8676,8628,8481,8336,8329,8293,8277,8275,8104,8039,7946,7923,7908,7900,7875,7830,7828,7822,7709,7688,7680,7596,7590,7557,7464,7323,7256,7192,7158,7115,7058,7016,6958,6942,6935,6831,6771,6770,6716,6612,6498,6481,6448,6335,6279,6267,6246,6215,6138,6114,6107,6010,5848,5806,5750,5656,5436,5398,5365,5324,5306,5269,5245,5223,5190,5156,5083,5056,5025,4881,4866,4800,4796,4789,4775,4753,4750,4654,4619,4606,4544,4538,4428,4331,4304,4261,4257,4222,4132,3947,3912,3901,3890,3872,3767,3683,3616,3595,3532,3487,3479,3441,3425,3334,3325,3324,3318,3294,3203,3180,3105,3013,2992,2965,2946,2797,2655,2566,2546,2486,2445,2403,2307,2222,2130,2113,2106,1998,1900,1877,1728,1682,1594,1593,1454,1364,1213,1059,1001,983,982,934,926,920,881,798,736,733,667,621,452,420,382,292,254,189,162,50,28].indexOf(b)==-1){debugger;return"-90_cbb";}else if(b<49816){debugger;if(b<25251){debugger;if(b<12177){debugger;if(b<6335){debugger;if(b<3479){debugger;if(b<1877){debugger;if(b<920){debugger;if(b<452){debugger;if(b<254){debugger;if(b<162){debugger;if(b<29){debugger;r=N0(u);a=uT(!r);}else{debugger;s=bf(r);t=n[c++];s?(c+=t,_=Zi1(s)):P=10;}}else b<163?(r=d_(s),t=iT(void r)):(s=U2(u),t=OF(u),s=t>s,t=v8(s));}else b<382?b<255?(r=xS(u),s=PF(typeof r)):t=xm(d):b<383?(s=r2(r),t=MT(P),s=t<s,h=B7(s)):function(){s=n[c++];u=Xy(r);z=[];for(t=0;t<s;t++){z.splice(0,0,XF(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=Xq(a);}();}else if(b<736){debugger;if(b<667){debugger;if(b<453){debugger;s=jL(h);t=n[c++];!s?c+=t:P=10;}else{debugger;r=_9(u);s=$C(~r);}}else if(b<668){debugger;s=EN(u);t=mB(a);u=f2(r);u.variablePool!=undefined?o(u,t,s):u[t]=s;}else{debugger;return;}}else if(b<881){debugger;b<737?(s=bs(t),u=n[c++],s?c+=u:P=9):t=dl([]);}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=Z0(s);let c=_;i[c]=function(){let e=new cshduei();var 
b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else if(b<1213){debugger;if(b<983){debugger;if(b<934){debugger;b<921?(s=HV(u),t=pX(a),s=t>>s,r=db1(s)):(s=n[c++],c+=s);}else if(b<935){debugger;return;}else{debugger;s=dM(u);t=PU(a);s=t%s;r=En1(s);}}else if(b<1059){debugger;if(b<984){debugger;s=H31(u);t=n[c++];!s?(c+=t,a=wx(s)):P=10;}else{debugger;s=le1(t);u=n[c++];s?c+=u:P=9;}}else{debugger;t=Mh([]);}}else b<1594?b<1454?b<1214?(s=ZG(r),t=R_(h),s=t*s,_=he(s)):(s=FZ(r),t=e2(P),s=t<s,h=Zq(s)):b<1455?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,te1()):a.splice(0,0,aV());}s=vD(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(s=n[c++],t=Lk(u),u=UJ(s),t[constantPool[s]]=u):b<1728?b<1595?(s=Fa(u),t=dc1(s),t.push(s),s=Tn(t)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=N7(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Gp(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=Ul(a);}():(s=as(u),t=_s(u),s=t>s,t=uF(s));}else 
if(b<2946){debugger;b<2403?b<2113?b<1998?b<1878?(s=y3(u),t=WL(s),s=t<s,s=pV(s)):(s=zm(m),t=om(s),s=t<=s,r=gf(s)):b<1999?(s=Mi(r),t=Xu(u),u=D9(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=kn1(a),t=eI(r),s=t-s,h=x9(s)):b<2222?b<2114?(r=ED(u),a=q4(!r)):(s=p01(),t=c11(),s=t!=s,wG(s)):b<2223?(s=BI(u),t=xw(a),s=t%s,r=GU(s)):(s=rh(t),t=gi(u),u=Vn(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=el(u)):b<2566?b<2486?b<2404?function(){debugger;}():(s=ao(u),t=Zn(a),s=t%s,r=T0(s)):b<2487?(s=Q1(a),t=T5(s),s=t>=s,t=v31(s)):s=kN(t):b<2797?b<2567?(s=n[c++],t=Cd(constantPool[s])):(s=fX(a),t=XP(r),s=t-s,h=$6(s)):(s=n21(u),t=d8(s),s=t in s,s=In1(s));}else if(b<3294){debugger;if(b<3105){debugger;if(b<2992){debugger;if(b<2947){debugger;s=YP(t);t=Am(u);s=t&s;u=Jk(s);}else{debugger;return;}}else b<2993?(s=qC(P),u=n[c++],s[constantPool[u]]+=1):function(){s=n[c++];u=we1(r);z=[];for(t=0;t<s;t++){z.splice(0,0,g4(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=DM(a);}();}else b<3203?b<3106?(s=zM(u),t=r$(a),s=t%s,r=w1(s)):(s=q2(P),u=n[c++],s[constantPool[u]]+=1):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,Qr()):a.splice(0,0,vn());}s=tu(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}();}else if(b<3334){debugger;if(b<3324){debugger;if(b<3295){debugger;s=LL(P);u=n[c++];s[constantPool[u]]+=1;}else{debugger;s=Fe(r);t=n[c++];s?(c+=t,_=gU(s)):P=10;}}else b<3325?(s=l4(u),t=io(a),s=t>>s,r=eJ(s)):function(){s=n[c++];u=p21(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Dl(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=N4(a);}();}else 
b<3441?b<3335?(s=F5(u),t=kW(a),s=t%s,r=di(s)):(s=zB(r),t=v9(h),s=t!==s,s=jb1(s)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=Fr(s));}else if(b<4800){debugger;if(b<4261){debugger;b<3890?b<3616?b<3532?b<3480?(s=xc1(t),t=I3(u),s=t&s,u=TU(s)):(s=hI(u),t=XH(s),s=t<s,s=I5(s)):b<3533?(s=Yx(t),t=ih(a),s=t instanceof s,_=VS(s)):(r=cN(u),s=eu(~r)):b<3767?b<3617?(s=SE(h),t=Ec1(s),s=t>>>s,t=Vw(s)):(s=Xw(u),t=lM(a),s=t%s,r=gc1(s)):b<3768?(s=_Q(t),t=ed(a),s=t instanceof s,_=Rz(s)):(s=Wi1(u),t=Ut(a),s=t%s,r=tU(s)):b<4132?b<3912?b<3891?(s=n[c++],c+=s):(s=w3(u),t=W3(s),s=t in s,s=Vp(s)):b<3913?function(){w=[];u=n[c++];t=UE(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():function(){s=n[c++];u=Rj(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Kj(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=_f(a);}():b<4257?b<4133?(s=C1(u),t=ZZ(s),s=t in s,s=Qe(s)):(r=Wa(u),a=G8(!r)):(r=$H(u),a=PX(!r));}else if(b<4619){debugger;if(b<4538){debugger;if(b<4331){debugger;if(b<4262){debugger;s=i6(m);t=QD(s);s=t<=s;r=ou(s);}else{debugger;return;}}else if(b<4332){debugger;s=Ol(t);t=WI(u);u=qt(a);u.variablePool!=undefined?o(u,t,s):u[t]=s;s=rG(u);}else{debugger;s=OI(u);t=n[c++];!s?(c+=t,a=Yq(s)):P=10;}}else if(b<4606){debugger;if(b<4539){debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return 
h;}else{debugger;P=10;}c=c+a;}}else{debugger;(function(){w=[];u=n[c++];t=Z9(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;})();}}else{debugger;s=SV(r);t=n[c++];s?(c+=t,_=Or(s)):P=10;}}else if(b<4775){debugger;b<4750?b<4620?(s=n[c++],t=JE(s)):function(){s=Nu(t);t=uS(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=gV(s);}():b<4751?(r=bM(s),t=nP(void r)):(r=op(t),t=W21(-r));}else if(b<4796){debugger;b<4776?(s=lJ(t),t=g6(s),s=t+s,s=$4(s)):(s=J2(h),t=Rg(s),s=t>>>s,t=ia(s));}else{debugger;s=J7(r);t=n[c++];s?(c+=t,_=mt(s)):P=10;}}else b<5398?b<5190?b<5056?b<4881?b<4801?(s=Vr(u),t=$9(t),s=t<<s,s=oL(s)):(s=tP(u),t=pW(s),u=delete t[s],s=Vc(u)):b<4882?(s=n[c++],c+=s):(s=n[c++],t=in1(u),u=mj(s),t[constantPool[s]]=u):b<5156?b<5057?(s=IL(u),t=TD(a),u=GW(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=nq(u),t=Ne(u),s=t>s,t=fh(s)):(s=KC(u),t=Ht(s),t.push(s),s=Cy(t)):b<5306?b<5245?b<5191?(i=m,s=gk(m)):(s=be1(u),t=zJ(s),s=t<s,s=Pe1(s)):b<5246?(s=ui(u),t=kE(a),s=t>>s,r=zw(s)):(s=DQ(u),t=HO(t),s=t<<s,s=W6(s)):b<5365?b<5307?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,GC()):a.splice(0,0,UB());}s=Yo(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(s=Zx(u),t=Zi(u),s=t>s,t=M(s)):(s=n[c++],t=rs(constantPool[s])):b<6107?b<5806?b<5656?b<5399?(s=si1(u),t=hy(s),u=delete t[s],s=Nj(u)):function(){w=[];u=n[c++];t=Et(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():b<5657?(s=CI(u),t=aJ(a),s=t>>s,r=Vu(s)):(s=oU(h),t=Bi1(_),s=t|s,v=yY(s)):b<6010?b<5807?(s=Ze1(u),t=fg(s),s=t in s,s=hF(s)):(s=jo(u),t=Wx(a),s=t^s,r=ln1(s)):(i=m,s=wE(m)):b<6246?b<6138?b<6108?(r=DF(u),s=tw(~r)):(s=VG(t),t=x(s),s=t===s,u=i7(s)):b<6139?(s=yU(a),t=lq(s),s=t>=s,t=Y2(s)):(r=Ue1(u),a=ha(!r)):b<6279?b<6247?(s=n_(a),t=Hc1(s),s=t>=s,t=Ka(s)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=Qf(s)):(s=Mt(t),t=uE(s),s=t==s,a=K$(s));}else 
if(b<9608){debugger;if(b<7875){debugger;if(b<7158){debugger;if(b<6831){debugger;b<6612?b<6481?b<6336?(s=Gu(r),t=FC(h),s=t!==s,s=nj(s)):(s=jz(u),t=kG(s),u=delete t[s],s=oc1(u)):b<6482?(s=ul(a),t=vy(r),s=t-s,h=kh(s)):(s=DH(a),t=sr(s),s=t>=s,t=GQ(s)):b<6770?b<6613?function(){s=h6(t);t=gR(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=_e1(s);}():(s=Uq(h),t=$D(_),s=t|s,v=yc1(s)):b<6771?(s=Xo(t),t=U21(s),s=t+s,s=lA(s)):(s=zo(u),t=Rd(s),s=t<s,s=VD(s));}else if(b<7016){debugger;if(b<6942){debugger;if(b<6832){debugger;s=mU(t);t=V5(s);s=t-s;s=Nt(s);}else{debugger;s=fq(u);t=n[c++];!s?(c+=t,a=ML(s)):P=10;}}else b<6943?(s=V7(r),t=RL(h),s=t*s,_=EV(s)):(s=n[c++],t=v$(constantPool[s]));}else b<7115?b<7017?(s=n[c++],t=o3(u),u=pD(s),t[constantPool[s]]=u):(s=Qq(t),t=S0(u),u=$K(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=Yz(u)):(s=pz(t),t=qb1(u),s=t&s,u=q_(s));}else b<7596?b<7464?b<7256?b<7159?(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=re1(s)):(s=y11(t),t=vS(a),s=t instanceof s,_=K4(s)):b<7257?(s=qK(m),t=Mq(s),s=t<=s,r=qi(s)):(s=hT(t),t=nD(s),s=t+s,s=Ue(s)):b<7590?b<7465?(s=n[c++],t=yS(constantPool[s])):function(){s=n[c++];u=sE(r);z=[];for(t=0;t<s;t++){z.splice(0,0,ib(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=$t(a);}():(r=P0(u),s=T21(~r)):b<7822?b<7688?b<7597?(r=Ox(s),t=dW(void r)):(s=NU(u),t=Dz(a),u=U31(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<7689?function(){s=e.CFf;for(t=0;t<s;t++)P=Hi(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(r=T(t),t=lz(-r)):b<7830?b<7823?(r=E6(s),t=BC(void r)):function(){s=W2(t);t=vq(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=wm(s);}():(g=NX(s),p=xn1(t),p.variablePool!=undefined?o(p,g,_):p[g]=s);}else 
if(b<8676){debugger;if(b<8275){debugger;if(b<7946){debugger;b<7908?b<7876?(r=i3(u),s=E9(~r)):(s=zv(t),t=Ic1(s),s=t-s,s=LJ(s)):b<7909?(s=$M(m),t=EO(s),s=t<=s,r=Tw(s)):(s=hv(u),t=Fi1(u),s=t>s,t=o1(s));}else if(b<8104){debugger;b<7947?(s=vE(t),t=iL(a),s=t instanceof s,_=Q$(s)):(r=bF(t),t=wJ(-r));}else{debugger;s=ac1(h);t=n[c++];!s?c+=t:P=10;}}else b<8336?b<8293?b<8276?s=lL({}):t=cG([]):b<8294?(s=Ee(t),t=O5(s),s=t-s,s=fI(s)):function(){s=e.CFf;for(t=0;t<s;t++)P=BZ(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():b<8628?b<8337?(s=ec(u),t=MI(a),s=t^s,r=jT(s)):(s=fj(h),t=cZ(_),s=t|s,v=q9(s)):(s=d1(u),u=n[c++],s[constantPool[u]]-=1);}else b<9342?b<8964?b<8954?b<8677?(s=x1(m),t=N(s),s=t<=s,r=NZ(s)):(s=wW(t),t=cF(u),u=$T(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=xH(u)):b<8955?(s=n[c++],t=mm(s)):(s=xN(h),t=j11(_),s=t|s,v=yw(s)):b<9185?b<8965?(s=n[c++],t=b11(s)):(s=SH(u),t=tX(a),s=t^s,r=BK(s)):(r=jC(u),s=aw(~r)):b<9560?b<9380?b<9343?(s=SR(h),t=uY(_),s=t|s,v=I7(s)):(s=n[c++],t=PO(s)):b<9381?(s=jQ(u),t=de(s),t.push(s),s=WB(t)):(s=j1(h),t=n5(_),s=t|s,v=L5(s)):b<9603?b<9561?(s=DP(u),t=z_(s),s=t<s,s=qh(s)):(s=RA(r),t=E0(P),s=t<s,h=MQ(s)):(s=nV(u),t=Kc(s),s=t in s,s=JB(s));}else if(b<10934){debugger;if(b<10454){debugger;if(b<10355){debugger;if(b<9882){debugger;if(b<9690){debugger;if(b<9609){debugger;s=mk(t);t=u11(a);s=t instanceof s;_=tv(s);}else{debugger;return;}}else b<9691?(s=n[c++],t=Au(constantPool[s])):(s=f_(u),t=On(t),s=t<<s,s=tW(s));}else if(b<10189){debugger;if(b<9883){debugger;s=gp(t);t=cv(s);s=t==s;a=C$(s);}else{debugger;s=XS(r);t=n[c++];s?(c+=t,_=VY(s)):P=10;}}else b<10190?(s=eC(a),t=rP(s),s=t>=s,t=ay(s)):(r=cl(u),a=_O(!r));}else b<10397?b<10385?b<10356?t=ev([]):(s=Vv(r),t=Bn(h),s=t!==s,s=FW(s)):b<10386?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=xf(r);z=[];for(t=0;t<s;t++){z.splice(0,0,_S(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new 
u(...z);offnew=0;s=n$(a);}():function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=ox(r);z=[];for(t=0;t<s;t++){z.splice(0,0,cn(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=YT(a);}():b<10425?b<10398?(s=H7(a),t=KY(s),s=t>=s,t=Lb1(s)):function(){s=Xc1(t);throw s;}():(i=m,s=yK(m));}else if(b<10688){debugger;if(b<10680){debugger;b<10510?b<10455?(s=TZ(u),t=mD(t),s=t<<s,s=ny(s)):function(){w=[];u=n[c++];t=x7(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():b<10511?(s=$o(m),t=WJ(s),s=t<=s,r=aB(s)):(s=n[c++],t=Rf(u),u=gq(s),t[constantPool[s]]=u);}else if(b<10684){debugger;if(b<10681){debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}else{debugger;r=OC(t);t=av(-r);}}else{debugger;s=n[c++];c+=s;}}else b<10815?b<10780?b<10689?(s=dg(t),t=eo(u),u=Sv(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=_T(u)):(s=Oe(a),t=KL(r),s=t-s,h=DY(s)):b<10781?(s=n[c++],t=Wo(u),u=o0(s),t[constantPool[s]]=u):(s=H$(u),u=n[c++],s[constantPool[u]]-=1):b<10918?b<10816?(s=J9(u),t=RG(a),s=t>>s,r=MB(s)):(s=e3(u),t=TM(s),t.push(s),s=bS(t)):(s=mG(r),t=sG(u),u=Jc(s),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else b<11393?b<11225?b<11114?b<11007?b<10935?(s=Pt(u),u=n[c++],s[constantPool[u]]-=1):s=zb1({}):b<11008?(s=Fg(),t=PI(),s=t!=s,nm(s)):s=kA(t):b<11202?b<11115?(s=_D(u),t=Lb(s),t.push(s),s=zi(t)):(r=X9(u),s=$l(typeof r)):(r=uc(u),s=aC(typeof 
r)):b<11322?b<11249?b<11226?(s=n[c++],c+=s):function(){s=wt(t);t=Ax(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=$U(s);}():b<11250?(s=n[c++],c+=s):function(){w=[];u=n[c++];t=iH(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():b<11376?b<11323?(s=n[c++],t=_w(constantPool[s])):(r=gb1(u),s=yb(typeof r)):(s=n[c++],t=ri1(u),u=xQ(s),t[constantPool[s]]=u):b<11679?b<11553?b<11481?b<11394?(s=qu(a),t=ph(s),s=t>=s,t=KX(s)):(s=X$(u),t=YO(t),s=t<<s,s=vT(s)):b<11482?(r=VV(t),t=YE(-r)):(s=$_(r),t=DT(P),s=t<s,h=jD(s)):b<11671?b<11554?function(){s=As(t);throw s;}():(s=IE(u),t=b3(s),s=t in s,s=B31(s)):s=OR(t):b<11850?b<11822?b<11680?(s=n[c++],t=mr(constantPool[s])):(s=s$(r),t=_m(u),u=to(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<11823?(s=n[c++],t=WT(u),u=n01(s),t[constantPool[s]]=u):(s=Md(u),t=Ip(u),s=t>s,t=CC(s)):b<12174?b<11851?(s=Tx(h),t=$1(s),s=t>>>s,t=mX(s)):(g=sA(s),p=_N(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(s=Tj(u),t=S3(s),u=delete t[s],s=sj(u));}else if(b<18760){debugger;if(b<15399){debugger;if(b<13768){debugger;b<12846?b<12634?b<12479?b<12295?b<12178?(s=Y31(u),t=LQ(s),u=delete t[s],s=dw(u)):(s=ai(t),t=kj(s),s=t/s,u=o11(s)):b<12296?(s=UH(u),t=YC(s),t.push(s),s=JO(t)):(s=Rk(u),t=OP(a),s=t>>s,r=oY(s)):b<12571?b<12480?(g=Tg(s),p=mc1(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(s=Sb1(r),t=Ln1(h),s=t*s,_=DR(s)):b<12572?(s=Id(t),t=O21(a),s=t instanceof s,_=ch(s)):(s=n[c++],c+=s):b<12791?b<12652?b<12635?(s=cw(u),t=JY(s),s=t<s,s=F0(s)):(s=_K(),t=vp(),s=t!=s,Kk(s)):b<12653?(s=Ii(t),t=Hz(s),s=t/s,u=uV(s)):(s=Gy(t),t=mY(s),s=t==s,a=_d(s)):b<12798?b<12792?(s=A4(P),u=n[c++],s[constantPool[u]]+=1):(s=n[c++],t=n1(s)):(s=KO(a),t=Fn1(r),s=t-s,h=kb1(s)):b<13043?b<12958?b<12908?b<12847?(s=hx(u),t=KA(a),s=t%s,r=k$(s)):(s=n[c++],t=rv(constantPool[s])):b<12909?(s=K9(t),t=fG(s),s=t===s,u=ew(s)):(s=rH(a),t=TE(r),s=t-s,h=KM(s)):b<13020?b<12959?(s=Zp(P),u=n[c++],s[constantPool[u]]+=1):(r=zC(u),s=oX(typeof 
r)):(s=pi1(),t=wO(),s=t!=s,nW(s)):b<13472?b<13314?b<13044?(s=Br(t),t=u5(u),u=Da(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=dq(u)):(s=oq(t),t=Vl(s),s=t-s,s=Ub(s)):b<13315?s=aL(t):s=DI({}):b<13618?b<13473?(s=CS(t),t=$F(s),s=t-s,s=DX(s)):(s=CX(t),t=xV(u),u=jb(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=g7(u)):function(){s=n[c++];u=Rn1(r);z=[];for(t=0;t<s;t++){z.splice(0,0,B9(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=l8(a);}();}else if(b<14777){debugger;b<14259?b<13876?b<13839?b<13769?function(){debugger;}():(s=n[c++],t=gh(s)):b<13840?(g=Hg(s),p=N11(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(g=Tb(s),p=I21(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):b<14191?b<13877?(r=K_(u),s=L(~r)):(s=ui1(u),t=ni(s),u=delete t[s],s=by(u)):b<14192?(s=_n1(r),t=wL(h),s=t!==s,s=pf(s)):(s=hh(t),t=ce(s),s=t-s,s=af(s)):b<14543?b<14440?b<14260?(s=oN(t),t=fF(a),s=t instanceof s,_=Mr(s)):(r=Ef(s),t=Ko(void r)):b<14441?(s=bh(),t=MD(),s=t!=s,lb1(s)):(s=xh(u),t=vj(u),s=t>s,t=eM(s)):b<14567?b<14544?(s=n[c++],t=lC(s)):(s=Ml(t),t=my(u),u=oM(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=SF(u)):(s=SI(m),t=DJ(s),s=t<=s,r=$P(s));}else if(b<15085){debugger;b<14930?b<14873?b<14778?(s=yg(u),t=SS(u),s=t>s,t=Ev(s)):(s=d6(r),t=CF(u),u=gY(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<14874?(s=WF(u),t=V3(s),s=t in s,s=GB(s)):(s=n[c++],t=vt(s)):b<15056?b<14931?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=cC(r);z=[];for(t=0;t<s;t++){z.splice(0,0,tl(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=t0(a);}():(s=L_(m),t=kc1(s),s=t<=s,r=dD(s)):(s=uA(u),t=i5(u),s=t>s,t=uD(s));}else if(b<15311){debugger;b<15135?b<15086?s=DB(t):(s=oE(t),t=Bn1(u),s=t&s,u=D_(s)):b<15136?s=kD(t):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=PV(r);z=[];for(t=0;t<s;t++){z.splice(0,0,CH(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new 
u(...z);offnew=0;s=zZ(a);}();}else if(b<15367){debugger;b<15312?s=sK({}):(s=At(t),t=Qm(u),s=t&s,u=kO(s));}else{debugger;return;}}else if(b<17153){debugger;if(b<16555){debugger;if(b<16045){debugger;if(b<15619){debugger;if(b<15554){debugger;b<15400?(s=UW(t),t=D31(s),s=t-s,s=iW(s)):s=_01({});}else if(b<15555){debugger;s=y4(t);t=qv(u);s=t&s;u=IT(s);}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=RK(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else b<15863?b<15620?(s=Is(r),t=ka(h),s=t!==s,s=ff(s)):function(){s=n[c++];u=d9(r);z=[];for(t=0;t<s;t++){z.splice(0,0,HB(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=Wl(a);}():b<15864?(s=ue1(t),t=r1(s),s=t===s,u=Jd(s)):(s=a21(),t=Pr(),s=t!=s,Dd(s));}else b<16157?b<16118?b<16046?(r=vR(s),t=xY(void r)):(s=n[c++],t=qR(u),u=Ep(s),t[constantPool[s]]=u):b<16119?(r=qA(u),s=zt(~r)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=WC(s)):b<16534?b<16158?s=Ww(t):function(){s=ss(t);throw s;}():(s=Xf(r),t=$B(u),u=f6(s),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else 
if(b<16890){debugger;b<16651?b<16630?b<16556?(s=P1(r),t=Te(u),u=mc(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=qG(r),t=fR(u),u=LV(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<16631?(s=n[c++],t=bw(constantPool[s])):(s=EQ(a),t=zX(s),s=t>=s,t=R31(s)):b<16798?b<16652?(s=B$(t),t=$z(s),s=t/s,u=_b1(s)):(s=mn1(u),t=Jr(t),s=t<<s,s=jc1(s)):function(){w=[];u=n[c++];t=HN(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}();}else if(b<17096){debugger;if(b<16963){debugger;b<16891?function(){s=BH(t);t=aW(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=jk(s);}():(s=yV(u),t=lE(a),s=t>>s,r=mL(s));}else if(b<16964){debugger;s=LR(t);t=yd(s);s=t==s;a=PL(s);}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=H8(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else b<17142?b<17097?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,fi1()):a.splice(0,0,oi1());}s=W5(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(s=n[c++],t=hQ(s)):(s=k9(t),t=Nc(u),s=t&s,u=Fu(s));}else if(b<18037){debugger;if(b<17538){debugger;if(b<17342){debugger;if(b<17235){debugger;b<17154?(s=nr(u),u=n[c++],s[constantPool[u]]-=1):(s=n[c++],t=ti1(u),u=E31(s),t[constantPool[s]]=u);}else 
if(b<17236){debugger;v=e.CFf;for(s=0;s<v;s++){_=z3(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;(function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,Y6()):a.splice(0,0,Jv());}s=u0(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}})();}}else b<17515?b<17343?(s=hf(t),t=Pc(s),s=t+s,s=FJ(s)):(s=n[c++],t=$G(u),u=ly(s),t[constantPool[s]]=u):(s=HJ(u),t=xp(u),s=t>s,t=SX(s));}else if(b<17807){debugger;b<17594?b<17539?(s=O(t),t=wP(s),s=t/s,u=oz(s)):(s=Xi1(u),t=Um(a),u=GL(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<17595?(s=TB(),t=_u(),s=t!=s,mz(s)):(s=l31(t),t=HT(u),u=A6(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=PT(u));}else if(b<17990){debugger;b<17808?(r=mE(t),t=kp(-r)):function(){debugger;}();}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=Nr(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return 
b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else b<18373?b<18171?b<18105?b<18038?(s=n[c++],t=vO(u),u=nh(s),t[constantPool[s]]=u):(s=OK(u),t=RZ(a),s=t%s,r=cJ(s)):b<18106?(s=ix(r),t=kB(P),s=t<s,h=rF(s)):(s=AC(t),t=tb(s),s=t-s,s=M0(s)):b<18303?b<18172?(s=n[c++],t=GG(u),u=ag(s),t[constantPool[s]]=u):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,d2()):a.splice(0,0,VL());}s=Lo(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():t=bJ(d):b<18536?b<18500?b<18374?(s=Qx(u),t=kT(a),s=t%s,r=PG(s)):(i=m,s=M11(m)):b<18501?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=He(r);z=[];for(t=0;t<s;t++){z.splice(0,0,EY(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=Qi(a);}():function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=rn1(r);z=[];for(t=0;t<s;t++){z.splice(0,0,xD(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=Rm(a);}():b<18722?b<18537?function(){s=n[c++];u=Av(r);z=[];for(t=0;t<s;t++){z.splice(0,0,MS(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=mI(a);}():(s=E1(P),u=n[c++],s[constantPool[u]]+=1):(s=sv(P),u=n[c++],s[constantPool[u]]+=1);}else 
if(b<21873){debugger;if(b<20237){debugger;if(b<19606){debugger;if(b<19222){debugger;if(b<19010){debugger;if(b<18975){debugger;if(b<18761){debugger;s=Xh(r);t=n[c++];s?(c+=t,_=Rr(s)):P=10;}else{debugger;(function(){s=e.CFf;for(t=0;t<s;t++)P=AF(t),l[t]!=undefined?m[P]=l[t]:u=l[t];})();}}else b<18976?(s=Bc1(t),u=n[c++],s?c+=u:P=9):(s=AB(u),t=Ub1(a),u=HM(r),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else if(b<19100){debugger;if(b<19011){debugger;s=$g(u);t=TS(s);s=t in s;s=i31(s);}else{debugger;s=$d(h);t=n[c++];!s?c+=t:P=10;}}else b<19101?(g=Za(s),p=Rb(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(s=uK(u),t=gb(s),t.push(s),s=S8(t));}else if(b<19498){debugger;b<19272?b<19223?s=S31({}):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=u4(s)):b<19273?(r=p0(u),s=Oi(~r)):(s=q11(r),t=tk(h),s=t!==s,s=wd(s));}else if(b<19563){debugger;if(b<19499){debugger;v=e.CFf;for(s=0;s<v;s++){_=G11(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=Rq(r);t=wT(u);u=jU(s);u.variablePool!=undefined?o(u,t,s):u[t]=s;}}else{debugger;s=n[c++];t=qj(u);u=Gc1(s);t[constantPool[s]]=u;}}else 
b<19807?b<19673?b<19638?b<19607?(s=Un1(r),t=gv(u),u=Oq(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=RT(t),t=Q9(u),s=t&s,u=uN(s)):b<19639?(s=sz(u),t=$r(a),u=p8(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=gr(u),t=Ie(s),t.push(s),s=GA(t)):b<19785?b<19674?(s=ac(u),t=_U(t),s=t<<s,s=Kp(s)):(s=zF(u),t=kb(u),s=t>s,t=E11(s)):function(){s=n[c++];u=Lx(r);z=[];for(t=0;t<s;t++){z.splice(0,0,nI(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=J1(a);}():b<19969?b<19857?b<19808?(s=lI(h),t=z5(s),s=t>>>s,t=Oe1(s)):(s=n[c++],t=Vi1(constantPool[s])):b<19858?(i=m,s=Jh(m)):(s=Uc(u),t=m7(a),s=t%s,r=pY(s)):b<20085?b<19970?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=Mm(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Zc1(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=GV(a);}():(r=Pe(u),s=iy(typeof r)):(s=Wr(a),t=_3(s),s=t>=s,t=cx(s));}else if(b<21017){debugger;if(b<20501){debugger;if(b<20327){debugger;if(b<20286){debugger;if(b<20238){debugger;(function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,em()):a.splice(0,0,jm());}s=uM(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}})();}else{debugger;s=jZ(u);t=n[c++];!s?(c+=t,a=$f(s)):P=10;}}else b<20287?(r=g1(t),t=RB(-r)):(s=of(t),u=n[c++],s?c+=u:P=9);}else b<20499?b<20328?(s=HL(u),t=j5(a),s=t%s,r=IS(s)):(r=Di(t),t=_r(-r)):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,su()):a.splice(0,0,hq());}s=Ij(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}();}else if(b<20836){debugger;if(b<20642){debugger;b<20502?(s=xy(t),t=cs(u),s=t&s,u=Dw(s)):(s=qd(P),u=n[c++],s[constantPool[u]]+=1);}else 
if(b<20643){debugger;s=Nn1(h);t=bq(s);s=t>>>s;t=v6(s);}else{debugger;s=Z6(u);t=n[c++];!s?(c+=t,a=U3(s)):P=10;}}else b<21009?b<20837?(s=$N(t),t=E5(s),s=t+s,s=I8(s)):function(){s=e.CFf;for(t=0;t<s;t++)P=XX(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=n[c++],t=CM(constantPool[s]));}else if(b<21610){debugger;if(b<21351){debugger;b<21087?b<21018?(s=zx(t),t=sJ(s),s=t-s,s=Hp(s)):(s=bl(r),t=qy(u),u=RW(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<21088?t=GZ([]):(s=BJ(r),t=rq(h),s=t!==s,s=on(s));}else if(b<21510){debugger;if(b<21352){debugger;return;}else{debugger;s=hE(r);t=iM(h);s=t*s;_=Nz(s);}}else{debugger;g=J6(s);p=ur(t);p.variablePool!=undefined?o(p,g,_):p[g]=s;}}else b<21773?b<21621?b<21611?(s=S11(t),t=Tr(s),s=t===s,u=Hm(s)):function(){debugger;}():b<21622?(s=_G(t),t=Ri1(u),s=t&s,u=Yj(s)):(s=lW(h),t=Vn1(s),s=t>>>s,t=ik(s)):b<21861?b<21774?function(){s=e.CFf;for(t=0;t<s;t++)P=oh(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(r=Dn(t),t=lc1(-r)):function(){s=n[c++];u=ax(r);z=[];for(t=0;t<s;t++){z.splice(0,0,eH(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=le(a);}();}else if(b<23772){debugger;if(b<22855){debugger;if(b<22562){debugger;if(b<22292){debugger;b<22028?b<21874?(s=Mp(r),t=Cc1(P),s=t<s,h=Ca(s)):(s=lR(r),t=xs(P),s=t<s,h=VA(s)):b<22029?(s=wc1(r),t=Lu(P),s=t<s,h=Ve1(s)):(s=n[c++],t=bv(u),u=w8(s),t[constantPool[s]]=u);}else if(b<22387){debugger;b<22293?(s=RR(u),t=OS(t),s=t<<s,s=M$(s)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=cf(r);z=[];for(t=0;t<s;t++){z.splice(0,0,cY(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=Ba(a);}();}else if(b<22388){debugger;v=e.CFf;for(s=0;s<v;s++){_=TF(s);let c=_;i[c]=function(){let e=new cshduei();var 
b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=x3(u);t=BA(s);s=t in s;s=QU(s);}}else b<22779?b<22720?b<22563?function(){w=[];u=n[c++];t=aS(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():(s=U11(t),t=iR(s),s=t+s,s=zr(s)):b<22721?s=Ja(t):s=Vy({}):b<22854?b<22780?function(){debugger;}():(s=SB(t),t=j6(a),s=t instanceof s,_=$J(s)):(s=xe1(t),t=z4(s),s=t-s,s=$A(s));}else if(b<23210){debugger;if(b<23127){debugger;b<23032?b<22856?(r=Eb1(u),a=$s(!r)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=on1(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Y$(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=V(a);}():b<23033?(s=n[c++],t=La(constantPool[s])):(s=Qc(t),t=uy(s),s=t-s,s=bK(s));}else if(b<23199){debugger;if(b<23128){debugger;s=h_(u);t=Us(s);t.push(s);s=_I(t);}else{debugger;return;}}else{debugger;r=sC(t);t=bN(-r);}}else if(b<23452){debugger;b<23229?b<23211?(s=t31(r),t=gO(h),s=t!==s,s=YL(s)):(s=n[c++],t=fr(s)):b<23230?(s=rC(a),t=o_(r),s=t-s,h=B21(s)):(s=nG(r),t=T2(h),s=t*s,_=mF(s));}else if(b<23672){debugger;b<23453?function(){s=n[c++];u=XW(r);z=[];for(t=0;t<s;t++){z.splice(0,0,bQ(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof 
window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=jF(a);}():(s=LU(a),t=aG(s),s=t>=s,t=fi(s));}else{debugger;s=u$(u);t=n[c++];!s?(c+=t,a=dY(s)):P=10;}}else if(b<24398){debugger;if(b<24100){debugger;if(b<23974){debugger;if(b<23939){debugger;if(b<23773){debugger;s=MH(t);t=P_(u);u=Xb(a);u.variablePool!=undefined?o(u,t,s):u[t]=s;s=Cs(u);}else{debugger;s=N5(h);t=n[c++];!s?c+=t:P=10;}}else b<23940?(s=fL(r),t=l11(P),s=t<s,h=cb(s)):function(){debugger;}();}else b<24089?b<23975?s=CP(t):(s=Fn(t),t=NB(s),s=t==s,a=VT(s)):(s=bk(u),t=jR(s),u=delete t[s],s=H(u));}else if(b<24270){debugger;if(b<24197){debugger;if(b<24101){debugger;(function(){s=e.CFf;for(t=0;t<s;t++)P=De1(t),l[t]!=undefined?m[P]=l[t]:u=l[t];})();}else{debugger;s=XU(u);t=n[c++];!s?(c+=t,a=D3(s)):P=10;}}else b<24198?(s=Ne1(u),t=yx(a),s=t^s,r=BU(s)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=jH(s));}else b<24376?b<24271?(s=c5(r),t=lx(P),s=t<s,h=tY(s)):(s=YA(t),u=n[c++],s?c+=u:P=9):function(){s=e.CFf;for(t=0;t<s;t++)P=y0(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}();}else if(b<25012){debugger;b<24770?b<24569?b<24399?(s=a31(t),t=BW(s),s=t+s,s=ej(s)):function(){s=AO(t);t=ob1(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=RE(s);}():b<24570?(s=Ci1(m),t=Y1(s),s=t<=s,r=Ie1(s)):(s=Xp(r),t=U$(h),s=t*s,_=VI(s)):b<24824?b<24771?(s=ai1(u),t=hr(a),s=t^s,r=Fj(s)):(s=D21(u),t=ak(a),s=t^s,r=h21(s)):(s=iN(h),t=n4(s),s=t>>>s,t=tq(s));}else if(b<25166){debugger;b<25089?b<25013?function(){s=H3(t);t=M3(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=g01(s);}():(s=hY(u),u=n[c++],s[constantPool[u]]-=1):b<25090?function(){s=Wv(t);t=pn(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=sf(s);}():(s=hn1(u),t=Ni1(a),s=t%s,r=r4(s));}else if(b<25174){debugger;if(b<25167){debugger;g=CL(s);p=tm(t);p.variablePool!=undefined?o(p,g,_):p[g]=s;}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=IJ(s);let c=_;i[c]=function(){let e=new cshduei();var 
b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else{debugger;s=nC(u);t=vw(a);s=t^s;r=Ay(s);}}else if(b<36706){debugger;if(b<30752){debugger;if(b<28270){debugger;if(b<26797){debugger;if(b<25830){debugger;if(b<25600){debugger;if(b<25537){debugger;if(b<25390){debugger;if(b<25252){debugger;s=dG(r);t=n[c++];s?(c+=t,_=Gn1(s)):P=10;}else{debugger;s=Eu(t);t=Wq(s);s=t===s;u=aQ(s);}}else if(b<25391){debugger;s=Vg(t);t=_R(s);s=t/s;u=lw(s);}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<25576?b<25538?(s=d$(t),t=Pg(s),s=t===s,u=rd(s)):(s=g0(u),t=$c(a),u=Hv(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<25577?function(){s=jv(t);throw s;}():(s=YH(r),t=GY(h),s=t*s,_=wA(s));}else if(b<25724){debugger;if(b<25674){debugger;b<25601?(s=yG(t),t=j_(a),s=t instanceof 
s,_=C11(s)):t=vb1([]);}else if(b<25675){debugger;s=WG(t);t=Vt(s);s=t===s;u=JR(s);}else{debugger;s=w31(r);t=n[c++];s?(c+=t,_=IG(s)):P=10;}}else b<25770?b<25725?(s=Rc(u),t=rc1(s),u=delete t[s],s=is(u)):(s=lB(u),t=e01(a),s=t>>s,r=J$(s)):(s=Ok(u),t=rQ(t),s=t<<s,s=Bv(s));}else b<26219?b<25974?b<25896?b<25831?(r=JD(u),s=AD(~r)):(s=X1(m),t=Q8(s),s=t<=s,r=Oa(s)):b<25897?(s=ho(u),t=G6(s),u=delete t[s],s=z0(u)):(s=J4(u),t=KQ(a),s=t>>s,r=K3(s)):b<26076?b<25975?(s=n[c++],c+=s):(s=n[c++],c+=s):t=vQ([]):b<26563?b<26424?b<26220?(s=oJ(u),u=n[c++],s[constantPool[u]]-=1):(s=D2(t),t=k5(s),s=t===s,u=x6(s)):b<26425?(s=n[c++],t=Bf(u),u=c1(s),t[constantPool[s]]=u):(s=un1(t),t=zT(s),s=t==s,a=RX(s)):b<26755?b<26564?(s=sc1(a),t=UG(r),s=t-s,h=qW(s)):t=$S([]):(s=q0(r),t=ZV(h),s=t!==s,s=OX(s));}else if(b<27598){debugger;if(b<26984){debugger;if(b<26894){debugger;if(b<26861){debugger;if(b<26798){debugger;s=L31(t);t=G2(s);s=t==s;a=xE(s);}else{debugger;s=py(r);t=n[c++];s?(c+=t,_=hi(s)):P=10;}}else b<26862?(s=cL(r),t=zH(u),u=ef(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):(r=UA(u),a=Jb1(!r));}else if(b<26942){debugger;if(b<26895){debugger;return;}else{debugger;(function(){s=n[c++];u=BP(r);z=[];for(t=0;t<s;t++){z.splice(0,0,ee1(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=sD(a);})();}}else b<26943?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=j$(r);z=[];for(t=0;t<s;t++){z.splice(0,0,j0(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=Y7(a);}():function(){s=lS(t);t=Dr(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=pd(s);}();}else b<27151?b<27108?b<26985?(s=pE(r),t=Ck(u),u=Tu(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=nu(h),t=r01(s),s=t>>>s,t=Y5(s)):b<27109?(g=XM(s),p=mv(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(i=m,s=Fb(m)):b<27553?b<27152?(s=E2(a),t=W11(r),s=t-s,h=it(s)):(s=Hh(t),t=v5(a),s=t instanceof 
s,_=Hs(s)):(s=Re1(u),t=ZY(s),s=t<s,s=d11(s));}else if(b<28025){debugger;if(b<27873){debugger;if(b<27642){debugger;if(b<27599){debugger;s=na(u);t=A11(s);s=t in s;s=Kb1(s);}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=lo(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else b<27643?(s=xA(t),t=b_(s),s=t+s,s=Ws(s)):(r=ym(u),s=oP(~r));}else b<27904?b<27874?(s=Ab(r),t=fc(h),s=t*s,_=rb1(s)):(s=Ed(u),t=GF(t),s=t<<s,s=o7(s)):(s=I9(r),t=cm(u),u=ZJ(s),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else if(b<28075){debugger;b<28064?b<28026?(s=n[c++],t=Wi(constantPool[s])):(s=js(u),t=_c1(u),s=t>s,t=VO(s)):b<28065?(s=ec1(),t=wg(),s=t!=s,w9(s)):(s=b1(u),t=_A(a),u=Ee1(r),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else if(b<28245){debugger;if(b<28076){debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return 
h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}else{debugger;s=DK(u);t=SZ(s);s=t<s;s=Wc1(s);}}else{debugger;(function(){s=n[c++];u=Ek(r);z=[];for(t=0;t<s;t++){z.splice(0,0,wI(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=IC(a);})();}}else if(b<29244){debugger;if(b<28860){debugger;b<28621?b<28367?b<28331?b<28271?function(){s=e.CFf;for(t=0;t<s;t++)P=im(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=qO(r),t=QJ(u),u=Om(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<28332?function(){s=n[c++];u=p31(r);z=[];for(t=0;t<s;t++){z.splice(0,0,K(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=hH(a);}():(s=n[c++],c+=s):b<28424?b<28368?(s=yt(u),t=qZ(s),s=t in s,s=zn1(s)):(r=xj(u),s=Tm(typeof r)):b<28425?(s=za(t),t=YB(u),u=v4(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=p$(u)):(s=N9(a),t=Yf(r),s=t-s,h=t8(s)):b<28686?b<28641?b<28622?function(){debugger;}():function(){debugger;}():b<28642?t=$j(d):(s=n[c++],t=lX(constantPool[s])):b<28797?b<28687?s=H2({}):function(){s=e.CFf;for(t=0;t<s;t++)P=s9(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=V1(r),t=Mu(h),s=t*s,_=y8(s));}else if(b<29094){debugger;if(b<29008){debugger;if(b<28886){debugger;if(b<28861){debugger;s=D11({});}else{debugger;s=b$(r);t=n[c++];s?(c+=t,_=l_(s)):P=10;}}else b<28887?(s=BF(t),t=mA(s),s=t+s,s=gm(s)):(s=I0(t),t=p9(s),s=t+s,s=lG(s));}else b<29076?b<29009?(s=$v(r),t=Ct(u),u=Gs(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=_0(r),t=lr(h),s=t*s,_=LW(s)):(s=n[c++],t=fy(u),u=sy(s),t[constantPool[s]]=u);}else 
if(b<29121){debugger;b<29099?b<29095?(s=xr(t),t=UY(s),s=t==s,a=Lg(s)):(r=eU(t),t=NP(-r)):b<29100?(s=Wu(t),u=n[c++],s?c+=u:P=9):(s=VZ(m),t=Zd(s),s=t<=s,r=Ky(s));}else if(b<29238){debugger;if(b<29122){debugger;s=n[c++];t=IB(u);u=xZ(s);t[constantPool[s]]=u;}else{debugger;s=eL(h);t=n[c++];!s?c+=t:P=10;}}else{debugger;s=kf(r);t=FV(P);s=t<s;h=lp(s);}}else if(b<30039){debugger;if(b<29607){debugger;if(b<29451){debugger;if(b<29399){debugger;if(b<29245){debugger;s=x8(u);t=ts(u);s=t>s;t=j31(s);}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<29400?function(){s=nc1(t);throw s;}():(s=tp(t),t=tN(s),s=t==s,a=iz(s));}else b<29587?b<29452?function(){s=tc(t);t=C21(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=Q2(s);}():(s=UN(t),t=F1(s),s=t===s,u=ZM(s)):(s=mh(h),t=f1(_),s=t|s,v=Pi(s));}else b<29791?b<29708?b<29608?(s=PC(u),u=n[c++],s[constantPool[u]]-=1):(s=T4(r),t=Yt(h),s=t!==s,s=$c1(s)):b<29709?function(){s=e.CFf;for(t=0;t<s;t++)P=W1(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=n[c++],t=MF(s)):b<3e4?b<29792?(s=R8(t),t=FK(s),s=t-s,s=EJ(s)):(s=OV(t),t=LK(s),s=t+s,s=NN(s)):t=oi(d);}else if(b<30291){debugger;if(b<30083){debugger;b<30055?b<30040?(s=mo(u),t=YV(t),s=t<<s,s=PA(s)):(s=L7(r),t=U6(h),s=t!==s,s=Wb1(s)):b<30056?(s=pc(u),t=f9(t),s=t<<s,s=Y0(s)):function(){debugger;}();}else if(b<30270){debugger;b<30084?(r=pM(u),s=Gt(typeof r)):(s=G21(u),t=dZ(s),u=delete t[s],s=Lz(u));}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=Ai(s);let c=_;i[c]=function(){let e=new 
cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else if(b<30636){debugger;b<30603?b<30292?(s=aX(h),t=nF(_),s=t|s,v=hg(s)):(s=Le1(u),t=ge1(a),s=t>>s,r=zK(s)):b<30604?function(){s=e.CFf;for(t=0;t<s;t++)P=sI(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():function(){debugger;}();}else if(b<30726){debugger;if(b<30637){debugger;(function(){w=[];u=n[c++];t=CT(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;})();}else{debugger;s=q(r);t=n[c++];s?(c+=t,_=FG(s)):P=10;}}else{debugger;return;}}else if(b<33448){debugger;if(b<32212){debugger;if(b<31612){debugger;if(b<31285){debugger;if(b<30959){debugger;if(b<30888){debugger;b<30753?(s=EX(t),t=ct(s),s=t-s,s=sL(s)):(s=OG(h),t=Wc(_),s=t|s,v=ri(s));}else if(b<30889){debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return 
h;}else{debugger;P=10;}c=c+a;}}else{debugger;s=ce1(t);t=_b(s);s=t+s;s=Bl(s);}}else if(b<31073){debugger;b<30960?(s=fb(t),t=qb(u),u=AR(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=HF(u)):(s=Cl(u),t=Sy(s),u=delete t[s],s=N2(u));}else if(b<31074){debugger;s=Rl(P);u=n[c++];s[constantPool[u]]+=1;}else{debugger;return;}}else b<31492?b<31464?b<31286?(s=n[c++],t=xC(s)):(r=Ys(u),s=iP(~r)):b<31465?(s=Vq(t),t=mi(s),s=t-s,s=LI(s)):(s=pj(m),t=AS(s),s=t<=s,r=Zs(s)):b<31570?b<31493?(r=gC(t),t=vd(-r)):(s=n[c++],t=YY(s)):(s=b5(m),t=AT(s),s=t<=s,r=t01(s));}else if(b<31900){debugger;if(b<31663){debugger;if(b<31630){debugger;if(b<31613){debugger;r=ut(u);s=Bk(typeof r);}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<31631?(s=GP(u),t=GO(a),u=Ab1(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=n[c++],t=Bc(u),u=uh(s),t[constantPool[s]]=u);}else b<31854?b<31664?(r=On1(u),s=z31(typeof r)):function(){s=e.CFf;for(t=0;t<s;t++)P=B4(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=_E(r),t=_$(h),s=t*s,_=$x(s));}else if(b<32039){debugger;if(b<31993){debugger;if(b<31901){debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return 
h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}else{debugger;(function(){w=[];u=n[c++];t=LO(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;})();}}else b<31994?(r=Q3(u),a=sd(!r)):(s=a01(a),t=t2(r),s=t-s,h=JT(s));}else b<32120?b<32040?(g=u8(s),p=BO(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):function(){w=[];u=n[c++];t=cE(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():(s=n[c++],t=Qy(u),u=Cc(s),t[constantPool[s]]=u);}else if(b<32898){debugger;b<32796?b<32495?b<32351?b<32213?(s=hc(r),t=cB(u),u=rt(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=Se1(u),t=UU(a),s=t^s,r=oI(s)):b<32352?(s=Bg(u),t=ar(a),s=t%s,r=Dy(s)):(s=jE(t),t=fO(u),u=en(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=qP(u)):b<32748?b<32496?(s=Ke(u),t=Mi1(t),s=t<<s,s=Ag(s)):(s=Lj(r),t=wV(P),s=t<s,h=zj(s)):function(){s=e.CFf;for(t=0;t<s;t++)P=co(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():b<32847?b<32826?b<32797?(s=ls(u),t=fN(s),t.push(s),s=C3(t)):(s=Qv(u),u=n[c++],s[constantPool[u]]-=1):b<32827?(s=n[c++],c+=s):(g=ug(s),p=r8(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):b<32877?b<32848?(r=dE(u),a=wD(!r)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=Uo(s)):(s=WE(u),t=uu(a),u=Dh(r),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else if(b<33175){debugger;if(b<33038){debugger;if(b<32975){debugger;b<32899?(s=c31(t),t=cU(u),u=Go(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=Zo(u)):(s=OZ(u),t=BN(u),s=t>s,t=Dp(s));}else if(b<32976){debugger;s=El(m);t=mC(s);s=t<=s;r=Qu(s);}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return 
h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<33097?b<33039?(s=X0(t),t=cq(u),s=t&s,u=Xc(s)):(s=KF(u),t=Z8(a),u=bY(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=n[c++],t=V21(u),u=nz(s),t[constantPool[s]]=u);}else if(b<33355){debugger;b<33287?b<33176?function(){s=n[c++];u=Uc1(r);z=[];for(t=0;t<s;t++){z.splice(0,0,C4(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=HD(a);}():function(){s=VM(t);t=SU(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=pn1(s);}():b<33288?s=DS({}):(s=PP(t),t=Ru(u),s=t&s,u=ry(s));}else if(b<33432){debugger;b<33356?function(){s=e.CFf;for(t=0;t<s;t++)P=QM(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=Hr(),t=fE(),s=t!=s,F11(s));}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else if(b<35085){debugger;if(b<34175){debugger;if(b<33905){debugger;if(b<33621){debugger;b<33502?b<33449?(s=Jm(t),t=u3(s),s=t/s,u=Lw(s)):(s=iV(u),t=j21(a),s=t%s,r=hj(s)):b<33503?(s=iU(a),t=i21(r),s=t-s,h=lU(s)):(s=n[c++],t=FX(s));}else if(b<33781){debugger;if(b<33622){debugger;s=mb(h);t=n[c++];!s?c+=t:P=10;}else{debugger;r=vL(u);s=ve1(typeof r);}}else b<33782?(r=f01(u),s=Zj(typeof 
r)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=FT(s));}else b<33999?b<33964?b<33906?(s=_Y(u),t=Sq(a),u=M6(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(r=y$(u),s=X8(typeof r)):b<33965?(s=UO(P),u=n[c++],s[constantPool[u]]+=1):(s=LN(u),t=Ec(s),t.push(s),s=GJ(t)):b<34160?b<34e3?(s=dk(t),t=G$(s),s=t-s,s=R9(s)):(s=q$(r),t=Lq(h),s=t*s,_=c7(s)):(s=r11(t),u=n[c++],s?c+=u:P=9);}else if(b<34632){debugger;b<34421?b<34291?b<34176?s=gg(t):(s=Tk(a),t=rW(s),s=t>=s,t=sO(s)):b<34292?(s=iQ(u),t=lK(s),u=delete t[s],s=WX(u)):(s=HU(u),t=Zl(a),s=t^s,r=Me(s)):b<34534?b<34422?(s=uO(h),t=Fc1(_),s=t|s,v=fV(s)):(r=fU(t),t=ah(-r)):(s=n[c++],c+=s);}else if(b<34728){debugger;b<34654?b<34633?(s=ju(t),u=n[c++],s?c+=u:P=9):(s=hm(u),u=n[c++],s[constantPool[u]]-=1):b<34655?s=gM({}):(s=n[c++],t=or(u),u=P11(s),t[constantPool[s]]=u);}else if(b<34999){debugger;if(b<34729){debugger;s=Xm(h);t=_Z(_);s=t|s;v=km(s);}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=uq(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else{debugger;(function(){w=[];u=n[c++];t=iB(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;})();}}else 
if(b<35943){debugger;b<35448?b<35221?b<35150?b<35086?s=MV(t):(s=yy(t),t=oc(s),s=t+s,s=X6(s)):b<35151?(s=O11(t),t=xG(s),s=t+s,s=s2(s)):s=FI({}):b<35445?b<35222?(s=bi(u),t=TR(t),s=t<<s,s=xX(s)):(s=GN(u),t=TT(a),s=t>>s,r=jG(s)):(s=QK(u),t=Tq(t),s=t<<s,s=zE(s)):b<35683?b<35487?b<35449?(s=m2(u),t=m21(a),s=t^s,r=Ew(s)):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,th()):a.splice(0,0,Nw());}s=g31(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():b<35488?t=h$(d):(s=Zm(t),t=cr(s),s=t==s,a=eG(s)):b<35931?b<35684?(s=IM(m),t=sw(s),s=t<=s,r=og(s)):(s=vW(t),t=BD(s),s=t/s,u=ZK(s)):(s=_W(h),t=xW(_),s=t|s,v=XR(s));}else if(b<36368){debugger;b<36266?b<36038?b<35944?(s=fJ(t),t=Ib(s),s=t/s,u=E_(s)):(s=bi1(t),t=F8(s),s=t-s,s=F21(s)):b<36039?(s=n[c++],c+=s):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=To(s)):b<36325?b<36267?(s=pN(h),t=q3(s),s=t>>>s,t=XL(s)):(s=D7(t),t=Nf(s),s=t-s,s=Na(s)):function(){s=k_(t);throw s;}();}else if(b<36635){debugger;b<36371?b<36369?t=A2([]):(s=q1(u),t=Il(a),u=qN(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<36372?(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=Ks(s)):(s=pu(u),t=kw(s),t.push(s),s=wl(t));}else if(b<36697){debugger;if(b<36636){debugger;v=e.CFf;for(s=0;s<v;s++){_=G31(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return 
b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;r=Aa(u);s=f4(typeof r);}}else{debugger;s=_z(h);t=n[c++];!s?c+=t:P=10;}}else if(b<43801){debugger;if(b<40190){debugger;if(b<38640){debugger;if(b<37570){debugger;if(b<37091){debugger;if(b<36827){debugger;if(b<36780){debugger;b<36707?(r=rD(u),s=Ln(~r)):(s=kq(t),t=lD(s),s=t/s,u=AE(s));}else if(b<36781){debugger;s=n[c++];c+=s;}else{debugger;s=Cf(h);t=n[c++];!s?c+=t:P=10;}}else b<36968?b<36828?(s=y21(t),t=Zn1(u),s=t&s,u=tV(s)):(s=SG(u),t=Bh(a),s=t>>s,r=bG(s)):b<36969?(s=ES(u),u=n[c++],s[constantPool[u]]-=1):(r=Pj(u),s=wh(typeof r));}else if(b<37509){debugger;if(b<37427){debugger;if(b<37092){debugger;s=LB(t);t=bc1(s);s=t===s;u=JP(s);}else{debugger;s=Wj(r);t=n[c++];s?(c+=t,_=zp(s)):P=10;}}else b<37428?(s=Fc(t),t=N3(s),s=t+s,s=MU(s)):(s=us(u),t=pC(s),u=delete t[s],s=zy(u));}else b<37551?b<37510?t=R7(d):(s=n[c++],c+=s):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,uz()):a.splice(0,0,lj());}s=$Z(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}();}else if(b<38135){debugger;if(b<37809){debugger;if(b<37759){debugger;if(b<37571){debugger;s=SA(u);t=Tt(s);s=t<s;s=Ki(s);}else{debugger;return;}}else b<37760?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=BL(r);z=[];for(t=0;t<s;t++){z.splice(0,0,xk(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=hz(a);}():(s=px(r),t=KV(h),s=t*s,_=je1(s));}else b<38088?b<37810?(s=Fe1(h),t=fx(_),s=t|s,v=GM(s)):(s=IF(t),t=_k(s),s=t+s,s=TG(s)):function(){s=dx(t);throw s;}();}else 
if(b<38495){debugger;if(b<38218){debugger;b<38136?(s=sX(r),t=jy(h),s=t*s,_=MR(s)):(s=S2(u),t=c01(a),u=up(r),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else if(b<38219){debugger;s=mg(r);t=bB(h);s=t*s;_=et(s);}else{debugger;return;}}else b<38637?b<38496?(s=Gc(a),t=Bz(r),s=t-s,h=sq(s)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=hZ(r);z=[];for(t=0;t<s;t++){z.splice(0,0,aR(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=GK(a);}():(s=n[c++],c+=s);}else if(b<39097){debugger;if(b<38856){debugger;if(b<38733){debugger;if(b<38685){debugger;if(b<38641){debugger;s=K1(u);t=YK(s);t.push(s);s=ZC(t);}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=CR(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else b<38686?(s=E(h),t=bO(_),s=t|s,v=$Y(s)):(r=Rt(u),s=_o(typeof r));}else b<38793?b<38734?(s=xq(a),t=aT(r),s=t-s,h=Kl(s)):function(){debugger;}():(s=Mf(h),t=v1(s),s=t>>>s,t=A1(s));}else if(b<39005){debugger;b<38952?b<38857?function(){debugger;}():function(){debugger;}():b<38953?(s=n[c++],t=yv(u),u=pG(s),t[constantPool[s]]=u):(s=Pa(a),t=BV(r),s=t-s,h=Ft(s));}else if(b<39054){debugger;b<39006?(s=ux(u),t=sU(a),s=t^s,r=yo(s)):function(){s=n[c++];u=Hk(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Kd(h));}let e=false;for(let b of 
Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=qH(a);}();}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=F2(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else if(b<39754){debugger;if(b<39582){debugger;if(b<39257){debugger;if(b<39098){debugger;s=EW(t);t=Fv(s);s=t/s;u=q31(s);}else{debugger;return;}}else b<39258?(s=hl(u),t=yQ(s),t.push(s),s=WZ(t)):(s=Qd(t),t=TH(u),s=t&s,u=$b1(s));}else b<39752?b<39583?(s=bn(u),t=nb(a),u=MM(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=Sb(),t=c9(),s=t!=s,NO(s)):(s=ON(u),u=n[c++],s[constantPool[u]]-=1);}else if(b<40003){debugger;if(b<39866){debugger;if(b<39755){debugger;v=e.CFf;for(s=0;s<v;s++){_=Q21(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return 
b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=JW(u);t=EG(a);u=LF(r);u.variablePool!=undefined?o(u,t,s):u[t]=s;}}else b<39867?function(){s=e.CFf;for(t=0;t<s;t++)P=sx(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():function(){s=e.CFf;for(t=0;t<s;t++)P=qx(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}();}else b<40118?b<40004?(s=m_(a),t=Uw(s),s=t>=s,t=nw(s)):(s=n[c++],t=Ge(s)):(r=zg(u),a=_x(!r));}else if(b<41907){debugger;if(b<40971){debugger;if(b<40552){debugger;if(b<40237){debugger;if(b<40220){debugger;b<40191?(i=m,s=au(m)):(s=s3(u),u=n[c++],s[constantPool[u]]-=1);}else if(b<40221){debugger;s=n[c++];t=tR(u);u=dd(s);t[constantPool[s]]=u;}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=$0(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else b<40433?b<40238?function(){s=SN(t);throw 
s;}():function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=YF(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Z_(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=dr(a);}():b<40434?s=mf(t):(s=h31(u),t=rl(t),s=t<<s,s=jM(s));}else b<40831?b<40690?b<40553?function(){s=bo(t);throw s;}():t=pk(d):b<40691?(s=S1(t),t=uB(a),s=t instanceof s,_=wB(s)):t=W31(d):b<40932?b<40832?(s=_y(r),t=f3(P),s=t<s,h=iG(s)):(s=ji1(u),t=IZ(s),s=t<s,s=uJ(s)):(s=kP(u),t=J21(s),t.push(s),s=LP(t));}else if(b<41451){debugger;b<41388?b<41131?b<40972?function(){debugger;}():t=t11([]):b<41132?(s=Sw(t),t=Si(s),s=t==s,a=TJ(s)):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,sb()):a.splice(0,0,yu());}s=yb1(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():b<41431?b<41389?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,y2()):a.splice(0,0,CK());}s=hK(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(r=_v(t),t=Je1(-r)):(r=Bb(s),t=In(void r));}else if(b<41752){debugger;b<41599?b<41452?(s=Wk(u),t=a7(t),s=t<<s,s=Mc1(s)):(s=Xl(t),u=n[c++],s?c+=u:P=9):b<41600?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,Q_()):a.splice(0,0,Yu());}s=IP(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():s=CE(t);}else if(b<41792){debugger;b<41753?(s=db(t),t=xi1(s),s=t===s,u=Uk(s)):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,df()):a.splice(0,0,rb());}s=c4(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}();}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return 
h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else if(b<42689){debugger;b<42495?b<42242?b<42074?b<41908?(s=De(t),t=v21(s),s=t==s,a=r31(s)):(s=Pq(u),t=f8(s),s=t<s,s=wS(s)):b<42075?(s=Qj(r),t=zc1(h),s=t*s,_=qe1(s)):(g=Nn(s),p=Ux(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):b<42396?b<42243?(s=n[c++],t=F9(constantPool[s])):(s=KB(t),t=b21(s),s=t-s,s=yL(s)):(s=ii1(t),u=n[c++],s?c+=u:P=9):b<42633?b<42577?b<42496?(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=PZ(s)):(s=cV(u),t=h2(a),s=t^s,r=RY(s)):b<42578?(s=ie(u),t=eV(s),s=t<s,s=_6(s)):(s=m9(u),t=Ku(s),s=t<s,s=Gr(s)):b<42671?b<42634?(s=bb(t),u=n[c++],s?c+=u:P=9):(s=B(),t=ad(),s=t!=s,CJ(s)):(s=An(u),t=ab1(u),s=t>s,t=Wz(s));}else if(b<43304){debugger;if(b<43050){debugger;if(b<42824){debugger;if(b<42690){debugger;v=e.CFf;for(s=0;s<v;s++){_=R5(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=va(u);t=H21(s);s=t in 
s;s=Cu(s);}}else b<42825?(s=M21(a),t=I31(s),s=t>=s,t=dQ(s)):(s=Jn(u),t=e9(a),s=t^s,r=sl(s));}else b<43124?b<43051?function(){s=oK(t);throw s;}():(i=m,s=Je(m)):(s=XO(r),t=WV(u),u=rY(s),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else if(b<43544){debugger;if(b<43415){debugger;b<43305?(s=Ud(t),t=ZA(u),u=Jc1(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=e6(u)):function(){w=[];u=n[c++];t=XJ(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}();}else if(b<43416){debugger;s=Dm(m);t=eg(s);s=t<=s;r=So(s);}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else if(b<43798){debugger;if(b<43545){debugger;s=S7(u);t=MZ(s);t.push(s);s=ZW(t);}else{debugger;s=gc(h);t=n[c++];!s?c+=t:P=10;}}else{debugger;s=mO(r);t=Kc1(u);u=Cn(s);u.variablePool!=undefined?o(u,t,s):u[t]=s;}}else if(b<47057){debugger;if(b<45592){debugger;if(b<44693){debugger;if(b<44080){debugger;b<43962?b<43843?b<43802?t=p6(d):function(){s=EL(t);throw s;}():b<43844?(s=Co(r),t=ru(P),s=t<s,h=TP(s)):(s=bc(u),t=u_(s),t.push(s),s=L2(t)):b<44040?b<43963?(s=MN(t),t=Yi1(s),s=t/s,u=li1(s)):(s=ol(t),t=aZ(u),u=IU(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=nv(u)):b<44041?function(){w=[];u=n[c++];t=O4(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():(s=Oc(u),t=t21(s),s=t in s,s=jr(s));}else if(b<44375){debugger;if(b<44188){debugger;if(b<44081){debugger;s=Pb(u);t=n[c++];!s?(c+=t,a=MX(s)):P=10;}else{debugger;s=uc1(h);t=cc1(s);s=t>>>s;t=iE(s);}}else 
b<44189?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=Uj(r);z=[];for(t=0;t<s;t++){z.splice(0,0,KN(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=Wg(a);}():s=N$({});}else b<44668?b<44376?(s=yE(r),t=Ua(h),s=t*s,_=By(s)):(s=n[c++],t=_8(s)):function(){s=jX(t);throw s;}();}else if(b<45144){debugger;b<44973?b<44892?b<44694?(s=gA(t),t=y9(s),s=t+s,s=XC(s)):(s=Ad(u),t=Wd(a),s=t%s,r=QO(s)):b<44893?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,Ki1()):a.splice(0,0,Em());}s=o2(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(r=H0(u),a=tO(!r)):b<45048?b<44974?(s=Ma(t),u=n[c++],s?c+=u:P=9):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,wv()):a.splice(0,0,xP());}s=QH(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(s=O7(u),t=Vm(s),t.push(s),s=DN(t));}else if(b<45431){debugger;if(b<45187){debugger;if(b<45145){debugger;s=sn(u);t=Sf(s);s=t in s;s=C7(s);}else{debugger;s=m0(h);t=n[c++];!s?c+=t:P=10;}}else if(b<45188){debugger;s=tf(t);t=Sa(u);s=t&s;u=OA(s);}else{debugger;s=Sp(h);t=n[c++];!s?c+=t:P=10;}}else if(b<45474){debugger;if(b<45432){debugger;s=K8(u);u=n[c++];s[constantPool[u]]-=1;}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=ht(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return 
b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else{debugger;s=w0(h);t=Cb1(_);s=t|s;v=me(s);}}else if(b<46204){debugger;if(b<45817){debugger;if(b<45655){debugger;b<45616?b<45593?(s=xv(u),t=AV(s),s=t<s,s=W8(s)):(s=g11(m),t=Ra(s),s=t<=s,r=lk(s)):b<45617?(i=m,s=ks(m)):(s=$5(u),t=Ym(s),u=delete t[s],s=HX(u));}else if(b<45684){debugger;if(b<45656){debugger;s=jY(r);t=n[c++];s?(c+=t,_=dL(s)):P=10;}else{debugger;s=$p(u);u=n[c++];s[constantPool[u]]-=1;}}else{debugger;s=kl(a);t=Bu(s);s=t>=s;t=p2(s);}}else if(b<45944){debugger;if(b<45841){debugger;b<45818?(s=x0(u),t=OJ(t),s=t<<s,s=kk(s)):(s=Ov(),t=b31(),s=t!=s,kz(s));}else if(b<45842){debugger;s=Y3(r);t=Sl(P);s=t<s;h=jK(s);}else{debugger;return;}}else b<46195?b<45945?(r=ze(u),s=ra(typeof r)):(s=Tz(m),t=XD(s),s=t<=s,r=Dt(s)):t=Z(d);}else if(b<46493){debugger;b<46383?b<46295?b<46205?function(){s=$V(t);throw s;}():(s=n[c++],t=Po(constantPool[s])):b<46296?(i=m,s=se1(m)):(s=n[c++],c+=s):b<46491?b<46384?(s=YD(u),t=LZ(a),u=H5(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=D5(u),t=se(s),u=delete t[s],s=t9(u)):(s=_h(t),u=n[c++],s?c+=u:P=9);}else if(b<46878){debugger;if(b<46670){debugger;if(b<46494){debugger;s=kx(r);t=ms(h);s=t*s;_=o$(s);}else{debugger;s=GH(h);t=n[c++];!s?c+=t:P=10;}}else b<46671?(s=IY(t),t=Qb1(s),s=t/s,u=xd(s)):(s=Sn(r),t=hR(h),s=t!==s,s=J5(s));}else b<47044?b<46879?(s=zs(r),t=Y_(h),s=t!==s,s=ob(s)):(s=xe(t),t=II(u),s=t&s,u=g21(s)):(r=Z2(u),s=i9(~r));}else if(b<48280){debugger;if(b<47430){debugger;if(b<47230){debugger;b<47082?b<47060?b<47058?(s=n[c++],t=n[c++],s=new 
RegExp(constantPool[s],constantPool[t]),a=ZN(s)):(r=nt(u),s=BT(~r)):b<47061?(s=XY(t),t=$k(s),s=t===s,u=cu(s)):(s=fW(t),t=Sh(s),s=t==s,a=Fs(s)):b<47149?b<47083?(s=OU(m),t=Ac(s),s=t<=s,r=qi1(s)):(g=T_(s),p=Ug(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):b<47150?(s=n[c++],c+=s):(s=c_(r),t=Ld(P),s=t<s,h=hu(s));}else if(b<47344){debugger;if(b<47322){debugger;b<47231?(r=AH(u),s=Iz(typeof r)):(s=ki(a),t=W_(s),s=t>=s,t=gN(s));}else if(b<47323){debugger;v=e.CFf;for(s=0;s<v;s++){_=Ff(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=QW(t);}}else if(b<47411){debugger;if(b<47345){debugger;s=qs(u);t=qT(a);u=EH(r);u.variablePool!=undefined?o(u,t,s):u[t]=s;}else{debugger;s=Cw(h);t=n[c++];!s?c+=t:P=10;}}else{debugger;s=n[c++];t=ya(constantPool[s]);}}else if(b<48041){debugger;b<47796?b<47640?b<47431?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,V0()):a.splice(0,0,ne());}s=yz(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(s=n[c++],t=n[c++],s=new 
RegExp(constantPool[s],constantPool[t]),a=eS(s)):b<47641?(s=Fl(u),t=C8(a),s=t>>s,r=Vf(s)):t=sY(d):b<48037?b<47797?(s=tG(t),u=n[c++],s?c+=u:P=9):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,ex()):a.splice(0,0,FB());}s=l2(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(s=kS(u),t=nn(a),s=t^s,r=Yy(s));}else if(b<48165){debugger;if(b<48111){debugger;if(b<48042){debugger;s=zI(h);t=n[c++];!s?c+=t:P=10;}else{debugger;r=eq(t);t=Vd(-r);}}else b<48112?(r=Jp(s),t=$8(void r)):(s=Ot(u),t=T7(u),s=t>s,t=CD(s));}else b<48275?b<48166?(s=NV(u),t=md(s),s=t in s,s=iA(s)):(s=n[c++],t=J_(s)):(s=t1(r),t=sc(h),s=t*s,_=L4(s));}else b<49017?b<48734?b<48515?b<48351?b<48281?t=O$(d):(s=QL(),t=h11(),s=t!=s,Ey(s)):b<48352?(s=QT(u),t=A_(u),s=t>s,t=vN(s)):function(){s=Kg(t);throw s;}():b<48702?b<48516?(s=n[c++],t=ZE(u),u=Zz(s),t[constantPool[s]]=u):function(){s=kM(t);throw s;}():(s=JF(u),t=k31(a),u=i$(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<48902?b<48847?b<48735?t=uL(d):(s=Dx(u),t=NA(a),s=t^s,r=rO(s)):b<48848?(r=UV(s),t=I4(void r)):(s=Z31(m),t=Pl(s),s=t<=s,r=I$(s)):b<48987?b<48903?(s=Vo(t),t=Qi1(u),s=t&s,u=Ai1(s)):(s=n[c++],t=IW(s)):s=n0({}):b<49524?b<49388?b<49225?b<49018?(s=_J(r),t=LS(h),s=t!==s,s=Mo(s)):(s=Px(t),t=oC(s),s=t==s,a=gP(s)):b<49226?(s=wF(a),t=ez(s),s=t>=s,t=xF(s)):(s=r5(t),t=QA(s),s=t/s,u=Y4(s)):b<49413?b<49389?(s=zq(t),t=vJ(s),s=t==s,a=qg(s)):(s=Rp(a),t=pa(s),s=t>=s,t=np(s)):(s=bj(u),t=Cj(s),s=t<s,s=e11(s)):b<49694?b<49677?b<49525?(s=_21(),t=eP(),s=t!=s,AL(s)):(s=mi1(u),t=OB(a),s=t%s,r=PJ(s)):b<49678?(s=gX(t),t=gG(s),s=t/s,u=Cp(s)):function(){s=e.CFf;for(t=0;t<s;t++)P=iY(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():b<49779?b<49695?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,lc()):a.splice(0,0,UK());}s=f31(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else 
if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(g=eN(s),p=Ia(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(s=fe(u),t=ig(s),u=delete t[s],s=Zf(u));}else if(b<74239){debugger;if(b<62025){debugger;if(b<55817){debugger;if(b<53265){debugger;if(b<51724){debugger;b<50991?b<50550?b<50350?b<50256?b<49817?(s=Ex(t),t=PE(s),s=t===s,u=cD(s)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=Li1(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Cv(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=Bb1(a);}():b<50257?(s=Fo(u),t=I1(a),s=t^s,r=RC(s)):(r=xl(u),a=h0(!r)):b<50531?b<50351?(s=Cr(a),t=YM(r),s=t-s,h=vn1(s)):(s=rj(r),t=vf(P),s=t<s,h=w4(s)):b<50532?(r=qk(t),t=mW(-r)):(s=R2(u),t=K6(u),s=t>s,t=KT(s)):b<50820?b<50632?b<50551?function(){debugger;}():(s=P9(t),t=lh(s),s=t===s,u=C2(s)):b<50633?(s=nf(u),t=Gb(t),s=t<<s,s=Ya(s)):(s=u1(t),t=yk(s),s=t/s,u=ql(s)):b<50932?b<50821?(s=x21(t),t=MP(u),u=oA(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=zi1(u)):(s=fb1(u),t=yD(t),s=t<<s,s=MO(s)):(s=z2(u),t=$q(u),s=t>s,t=n9(s)):b<51313?b<51223?b<51120?b<50992?function(){w=[];u=n[c++];t=Fz(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():(r=We(u),s=AN(~r)):b<51121?(s=Ke1(h),t=s0(s),s=t>>>s,t=FS(s)):(s=Jy(u),t=pq(s),s=t in s,s=pH(s)):b<51271?b<51224?(s=LE(t),t=j4(s),s=t==s,a=Rh(s)):(s=Un(u),t=qD(t),s=t<<s,s=yj(s)):(s=sQ(u),t=aa(u),s=t>s,t=CY(s)):b<51553?b<51506?b<51314?(s=cW(h),t=IH(_),s=t|s,v=Lt(s)):(g=yh(s),p=SY(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):b<51507?(r=oV(s),t=Pn(void r)):(i=m,s=XG(m)):b<51664?b<51554?(s=Xb1(h),t=Nx(_),s=t|s,v=hb(s)):(s=NK(r),t=WD(u),u=j8(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=Jg(r),t=wX(u),u=w2(s),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else if(b<52568){debugger;if(b<52230){debugger;if(b<51990){debugger;b<51789?b<51725?(s=xb(),t=oj(),s=t!=s,NM(s)):(s=n3(t),t=jW(a),s=t instanceof s,_=M1(s)):b<51790?(s=A3(u),t=ty(s),t.push(s),s=br(t)):(r=r9(u),a=O1(!r));}else 
if(b<52207){debugger;if(b<51991){debugger;return;}else{debugger;r=iv(u);a=$21(!r);}}else b<52208?(r=_H(s),t=RF(void r)):(s=nc(P),u=n[c++],s[constantPool[u]]+=1);}else if(b<52416){debugger;if(b<52326){debugger;if(b<52231){debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}else{debugger;s=_L(u);t=Rx(s);t.push(s);s=g_(t);}}else b<52327?(s=kF(u),t=SW(a),u=dC(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=di1(u),u=n[c++],s[constantPool[u]]-=1);}else b<52473?b<52417?function(){w=[];u=n[c++];t=h1(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():function(){debugger;}():(s=mH(u),t=bt(a),s=t^s,r=gB(s));}else if(b<53041){debugger;b<52872?b<52735?b<52569?(s=n[c++],t=e8(s)):(s=yO(t),u=n[c++],s?c+=u:P=9):b<52736?(s=$2(u),t=qc1(a),s=t>>s,r=RI(s)):(s=ZI(u),t=rM(a),s=t>>s,r=T3(s)):b<52966?b<52873?(s=n[c++],c+=s):(r=nx(u),s=rp(~r)):(r=Wh(s),t=PW(void r));}else if(b<53148){debugger;if(b<53103){debugger;if(b<53042){debugger;s=Ix(u);u=n[c++];s[constantPool[u]]-=1;}else{debugger;s=HK(u);t=n[c++];!s?(c+=t,a=LY(s)):P=10;}}else b<53104?(s=v0(t),t=Wm(a),s=t instanceof s,_=xI(s)):function(){s=n[c++];u=sZ(r);z=[];for(t=0;t<s;t++){z.splice(0,0,XT(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=KG(a);}();}else b<53246?b<53149?(i=m,s=kL(m)):(s=Ji(u),t=WH(a),s=t%s,r=E3(s)):(r=jg(s),t=WN(void r));}else 
if(b<54695){debugger;if(b<54059){debugger;if(b<53454){debugger;if(b<53357){debugger;b<53346?b<53266?(s=Az(r),t=Ti(h),s=t!==s,s=gD(s)):s=Sd({}):b<53347?(r=vM(u),a=UX(!r)):(r=MY(s),t=Hn1(void r));}else if(b<53391){debugger;if(b<53358){debugger;s=am(r);t=Dj(h);s=t!==s;s=we(s);}else{debugger;return;}}else if(b<53392){debugger;s=P6(r);t=n[c++];s?(c+=t,_=dO(s)):P=10;}else{debugger;s=n[c++];t=Hb(constantPool[s]);}}else if(b<53689){debugger;if(b<53594){debugger;b<53455?function(){s=xb1(t);throw s;}():t=GE(d);}else if(b<53595){debugger;s=Ds(u);t=n[c++];!s?(c+=t,a=Kh(s)):P=10;}else{debugger;s=n[c++];t=QG(s);}}else if(b<53792){debugger;b<53690?(s=OY(t),t=pr(s),s=t+s,s=Nd(s)):(s=yn(t),t=RJ(s),s=t/s,u=Gj(s));}else{debugger;return;}}else if(b<54376){debugger;if(b<54269){debugger;if(b<54123){debugger;if(b<54060){debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}else{debugger;s=tQ({});}}else if(b<54124){debugger;s=Zr(r);t=n[c++];s?(c+=t,_=uR(s)):P=10;}else{debugger;s=rU(r);t=Af(h);s=t!==s;s=Nh(s);}}else b<54321?b<54270?(s=s4(t),t=xR(a),s=t instanceof s,_=az(s)):(i=m,s=Xd(m)):(s=R6(),t=kJ(),s=t!=s,mb1(s));}else b<54623?b<54456?b<54377?(s=g9(a),t=il(s),s=t>=s,t=FD(s)):(s=g2(P),u=n[c++],s[constantPool[u]]+=1):b<54457?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=Q0(r);z=[];for(t=0;t<s;t++){z.splice(0,0,pp(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=eZ(a);}():(s=n[c++],t=XI(constantPool[s])):b<54681?b<54624?(r=tb1(u),s=P5(typeof 
r)):function(){s=D4(t);throw s;}():(s=dn1(t),t=QC(a),s=t instanceof s,_=wi1(s));}else if(b<55206){debugger;if(b<54875){debugger;if(b<54836){debugger;if(b<54819){debugger;if(b<54696){debugger;return;}else{debugger;s=hD(t);t=NQ(u);u=FN(a);u.variablePool!=undefined?o(u,t,s):u[t]=s;s=gn1(u);}}else b<54820?(s=$i1(t),t=KR(s),s=t==s,a=e7(s)):function(){s=e.CFf;for(t=0;t<s;t++)P=Nm(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}();}else b<54871?b<54837?t=p5(d):(s=wR(r),t=zc(h),s=t!==s,s=hs(s)):t=CA(d);}else if(b<55060){debugger;if(b<54991){debugger;if(b<54876){debugger;s=fK(t);t=tS(s);s=t==s;a=fA(s);}else{debugger;s=DC(h);t=n[c++];!s?c+=t:P=10;}}else b<54992?t=p1([]):(g=aO(s),p=Oj(t),p.variablePool!=undefined?o(p,g,_):p[g]=s);}else b<55137?b<55061?s=a1({}):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=ng(s)):(s=wy(h),t=pQ(_),s=t|s,v=F31(s));}else b<55467?b<55299?b<55275?b<55207?(s=OM(u),t=Xv(a),s=t^s,r=Vk(s)):(r=VH(s),t=KJ(void r)):b<55276?(r=zu(s),t=qL(void r)):(r=Mc(s),t=ps(void r)):b<55377?b<55300?(s=Nc1(u),t=r6(a),s=t^s,r=Ao(s)):function(){s=n[c++];u=kU(r);z=[];for(t=0;t<s;t++){z.splice(0,0,wb(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=B0(a);}():(i=m,s=CV(m)):b<55644?b<55596?b<55468?function(){w=[];u=n[c++];t=L0(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():(i=m,s=D0(m)):b<55597?function(){s=e.CFf;for(t=0;t<s;t++)P=Ri(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():s=k21(t):b<55816?b<55645?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,nR()):a.splice(0,0,aM());}s=cS(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(r=nO(u),s=kZ(~r)):(r=A31(u),a=HP(!r));}else 
if(b<59023){debugger;if(b<57654){debugger;if(b<56889){debugger;if(b<56413){debugger;if(b<56152){debugger;if(b<56070){debugger;b<55818?s=zY({}):(s=Yl(t),t=t5(a),s=t instanceof s,_=hp(s));}else if(b<56071){debugger;s=dS(u);t=MC(s);s=t<s;s=uW(s);}else{debugger;s=Xx(r);t=n[c++];s?(c+=t,_=En(s)):P=10;}}else b<56383?b<56153?(s=Zg(t),t=Ea(s),s=t/s,u=Xs(s)):(s=FU(u),t=jP(s),s=t<s,s=PY(s)):b<56384?(g=Ak(s),p=hV(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(s=bZ(t),t=J0(s),s=t-s,s=n2(s));}else if(b<56621){debugger;if(b<56577){debugger;if(b<56414){debugger;s=lP(t);t=Bp(s);s=t==s;a=oR(s);}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<56578?(s=Ta(u),t=so(a),s=t>>s,r=f11(s)):(s=Ng(t),t=gL(u),s=t&s,u=li(s));}else b<56874?b<56622?(s=wp(u),t=Fd(s),s=t in s,s=Qk(s)):(r=T9(u),a=aN(!r)):(s=Yv(a),t=gj(r),s=t-s,h=BE(s));}else if(b<57239){debugger;b<57115?b<56955?b<56890?function(){s=e.CFf;for(t=0;t<s;t++)P=LC(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(r=dP(u),a=cX(!r)):b<56956?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=dh(r);z=[];for(t=0;t<s;t++){z.splice(0,0,c3(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=bL(a);}():(s=Ow(a),t=ky(s),s=t>=s,t=St(s)):b<57205?b<57116?(s=Wn(t),t=kc(s),s=t==s,a=PS(s)):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,Sm()):a.splice(0,0,aP());}s=WO(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else 
if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(s=ND(h),t=HG(s),s=t>>>s,t=oO(s));}else if(b<57494){debugger;if(b<57370){debugger;if(b<57240){debugger;s=b6(u);t=n[c++];!s?(c+=t,a=R21(s)):P=10;}else{debugger;s=q6(t);u=n[c++];s?c+=u:P=9;}}else b<57371?(s=vr(r),t=cj(h),s=t!==s,s=Qh(s)):(s=t4(u),t=G0(t),s=t<<s,s=IO(s));}else b<57518?b<57495?function(){s=id(t);t=Kb(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=po(s);}():(s=yi1(u),u=n[c++],s[constantPool[u]]-=1):(s=tn1(h),t=Gm(_),s=t|s,v=nU(s));}else if(b<58230){debugger;if(b<57954){debugger;b<57814?b<57761?b<57655?function(){debugger;}():function(){debugger;}():b<57762?s=M4({}):(r=V2(u),s=AX(typeof r)):b<57947?b<57815?(r=qp(u),a=rm(!r)):(s=p_(u),t=We1(s),s=t in s,s=jd(s)):(s=$i(t),t=D(u),s=t&s,u=pb1(s));}else if(b<58107){debugger;if(b<58027){debugger;if(b<57955){debugger;i=m;s=v7(m);}else{debugger;s=M7(u);t=n[c++];!s?(c+=t,a=P7(s)):P=10;}}else b<58028?(s=FQ(u),t=gn(a),s=t%s,r=xO(s)):function(){s=k(t);t=Sz(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=G9(s);}();}else if(b<58218){debugger;if(b<58108){debugger;(function(){w=[];u=n[c++];t=wc(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;})();}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else{debugger;i=m;s=lV(m);}}else if(b<58581){debugger;if(b<58428){debugger;if(b<58358){debugger;b<58231?(i=m,s=J(m)):(s=o4(u),t=vK(s),t.push(s),s=kd(t));}else if(b<58359){debugger;return;}else{debugger;s=n[c++];t=l3(constantPool[s]);}}else 
if(b<58577){debugger;if(b<58429){debugger;s=Hj(h);t=n[c++];!s?c+=t:P=10;}else{debugger;t=Kz([]);}}else{debugger;(function(){w=[];u=n[c++];t=lF(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;})();}}else if(b<58748){debugger;if(b<58658){debugger;if(b<58582){debugger;r=LX(u);a=jI(!r);}else{debugger;return;}}else if(b<58659){debugger;v=e.CFf;for(s=0;s<v;s++){_=Sg(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=n[c++];c+=s;}}else if(b<59001){debugger;b<58749?(s=Di1(u),t=Ha(a),s=t>>s,r=tD(s)):(s=V6(u),t=vF(s),u=delete t[s],s=Ir(u));}else{debugger;return;}}else if(b<60575){debugger;if(b<59730){debugger;if(b<59439){debugger;if(b<59249){debugger;b<59174?b<59024?(s=iF(h),t=K11(_),s=t|s,v=uH(s)):(i=m,s=G4(m)):b<59175?(r=dT(s),t=V8(void r)):(s=n[c++],c+=s);}else if(b<59339){debugger;if(b<59250){debugger;s=Q6(u);t=n[c++];!s?(c+=t,a=fD(s)):P=10;}else{debugger;s=cc(t);u=n[c++];s?c+=u:P=9;}}else b<59340?function(){debugger;}():(s=n[c++],c+=s);}else b<59623?b<59499?b<59440?s=$u({}):function(){w=[];u=n[c++];t=sn1(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():b<59500?(s=Os(t),t=Xe1(s),s=t===s,u=TY(s)):(s=P3(a),t=vA(s),s=t>=s,t=d01(s)):b<59645?b<59624?(s=zn(),t=U0(),s=t!=s,b0(s)):(s=Iq(t),t=a5(s),s=t-s,s=Zw(s)):t=a2(d);}else 
if(b<60162){debugger;if(b<59918){debugger;if(b<59849){debugger;if(b<59731){debugger;s=ee(u);t=du(a);s=t%s;r=dm(s);}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<59850?(s=nE(t),t=Ib1(u),u=bm(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=lZ(u)):(s=U9(r),t=Be1(P),s=t<s,h=wj(s));}else if(b<60132){debugger;b<59919?(s=Hi1(t),t=ME(s),s=t/s,u=$$(s)):(s=E7(P),u=n[c++],s[constantPool[u]]+=1);}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<60432?b<60290?b<60163?(s=ZF(u),t=H6(u),s=t>s,t=L6(s)):function(){s=D$(t);throw s;}():b<60291?(i=m,s=ZX(m)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=Kv(s)):b<60558?b<60433?(s=Pz(u),t=lv(s),t.push(s),s=v11(t)):(r=OO(u),a=Yn(!r)):(s=C5(r),t=$X(P),s=t<s,h=K21(s));}else 
if(b<61315){debugger;b<61057?b<60956?b<60632?b<60576?(s=HC(t),t=uP(s),s=t===s,u=w_(s)):(s=kr(t),u=n[c++],s?c+=u:P=9):b<60633?s=Fq(t):(s=ab(u),t=SK(a),s=t^s,r=os(s)):b<60973?b<60957?(r=Q5(u),a=ns(!r)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=E$(s)):function(){s=n[c++];u=Df(r);z=[];for(t=0;t<s;t++){z.splice(0,0,KE(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=Sc1(a);}():b<61283?b<61138?b<61058?(s=rA(u),t=gt(s),s=t in s,s=zh(s)):(s=OH(t),u=n[c++],s?c+=u:P=9):b<61139?(s=Jl(r),t=hN(h),s=t*s,_=ye(s)):function(){w=[];u=n[c++];t=bz(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():b<61306?b<61284?t=qf(d):(s=td(P),u=n[c++],s[constantPool[u]]+=1):(s=Tl(t),t=Zc(a),s=t instanceof s,_=v01(s));}else if(b<61765){debugger;if(b<61611){debugger;if(b<61532){debugger;b<61316?function(){debugger;}():(s=ye1(u),t=G3(u),s=t>s,t=bD(s));}else if(b<61533){debugger;v=e.CFf;for(s=0;s<v;s++){_=Ig(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=Jq(h);t=NC(s);s=t>>>s;t=QI(s);}}else b<61700?b<61612?(s=hi1(h),t=X5(s),s=t>>>s,t=od(s)):t=z9(d):(s=V31(u),t=PB(a),s=t^s,r=Fb1(s));}else 
b<61958?b<61845?b<61766?(s=P21(u),t=Ob(a),u=T$(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=ke(u),t=fc1(a),s=t>>s,r=kn(s)):b<61846?(s=i8(t),u=n[c++],s?c+=u:P=9):(s=gK(m),t=cp(s),s=t<=s,r=pI(s)):b<61970?b<61959?(s=Zu(t),t=Bx(s),s=t==s,a=ae(s)):(s=oB(t),t=h8(s),s=t+s,s=W0(s)):(s=u6(P),u=n[c++],s[constantPool[u]]+=1);}else if(b<68464){debugger;if(b<65163){debugger;if(b<63566){debugger;if(b<62778){debugger;if(b<62522){debugger;if(b<62246){debugger;b<62166?b<62026?(s=hS(t),t=dI(s),s=t/s,u=Ms(s)):(s=Le(r),t=Fm(h),s=t*s,_=Bt(s)):b<62167?t=lO(d):(s=$n(P),u=n[c++],s[constantPool[u]]+=1);}else if(b<62345){debugger;b<62247?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=PD(r);z=[];for(t=0;t<s;t++){z.splice(0,0,pb(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=$a(a);}():(s=n[c++],t=Tp(u),u=y31(s),t[constantPool[s]]=u);}else if(b<62346){debugger;s=DO(r);t=n[c++];s?(c+=t,_=JM(s)):P=10;}else{debugger;t=$n1(d);}}else b<62678?b<62564?b<62523?s=cO(t):(r=w6(u),a=tA(!r)):b<62565?(s=L1(u),t=dt(s),s=t<s,s=Xn1(s)):(r=Tc1(s),t=r0(void r)):b<62722?b<62679?function(){debugger;}():(s=YG(u),t=vx(s),u=delete t[s],s=Rn(u)):s=Q11(t);}else if(b<63301){debugger;if(b<63123){debugger;b<62826?b<62779?(s=lg(h),t=AW(s),s=t>>>s,t=Vc1(s)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=kR(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Gf(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=vh(a);}():b<62827?(s=o6(u),t=Cx(s),t.push(s),s=pJ(t)):function(){w=[];u=n[c++];t=ro(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}();}else if(b<63243){debugger;if(b<63124){debugger;s=n[c++];t=n[c++];s=new RegExp(constantPool[s],constantPool[t]);a=Fy(s);}else{debugger;s=AQ(u);t=n[c++];!s?(c+=t,a=tg(s)):P=10;}}else{debugger;g=k4(s);p=EE(t);p.variablePool!=undefined?o(p,g,_):p[g]=s;}}else if(b<63465){debugger;if(b<63343){debugger;b<63302?(s=Py(t),t=QE(s),s=t-s,s=PM(s)):(s=gZ(u),t=al(s),u=delete t[s],s=zz(u));}else 
if(b<63344){debugger;(function(){s=e.CFf;for(t=0;t<s;t++)P=Mz(t),l[t]!=undefined?m[P]=l[t]:u=l[t];})();}else{debugger;return;}}else b<63535?b<63466?(s=h3(P),u=n[c++],s[constantPool[u]]+=1):(s=n[c++],t=Zb(s)):s=P4({});}else if(b<64459){debugger;if(b<64131){debugger;if(b<63867){debugger;if(b<63774){debugger;if(b<63567){debugger;s=mJ(P);u=n[c++];s[constantPool[u]]+=1;}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<63775?(s=sh(t),t=Ah(s),s=t+s,s=Lm(s)):(s=pw(u),t=bI(t),s=t<<s,s=JK(s));}else b<64042?b<63868?(s=Qb(a),t=t7(r),s=t-s,h=r_(s)):(s=jp(u),t=TX(s),u=delete t[s],s=Hy(u)):b<64043?(s=NT(u),t=D8(t),s=t<<s,s=Ql(s)):t=HY([]);}else if(b<64311){debugger;b<64235?b<64132?(s=vl(u),t=fl(s),s=t in s,s=WQ(s)):(s=mR(a),t=pR(r),s=t-s,h=v2(s)):b<64236?(s=g$(u),t=FP(u),s=t>s,t=fQ(s)):(s=at(),t=PN(),s=t!=s,U1(s));}else if(b<64383){debugger;if(b<64312){debugger;s=RH(r);t=n[c++];s?(c+=t,_=pL(s)):P=10;}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return 
h;}else{debugger;P=10;}c=c+a;}}}else{debugger;s=RS(t);t=eb1(s);s=t+s;s=xn(s);}}else if(b<64989){debugger;b<64869?b<64749?b<64460?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=nM(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Ti1(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=wQ(a);}():(s=AM(u),t=xu(a),u=A(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<64750?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=nn1(r);z=[];for(t=0;t<s;t++){z.splice(0,0,rr(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=Lv(a);}():(s=Iv(t),t=VU(u),s=t&s,u=gy(s)):b<64977?b<64870?(s=Fk(m),t=vI(s),s=t<=s,r=_M(s)):(s=n[c++],t=nA(s)):t=rT(d);}else if(b<65077){debugger;if(b<65025){debugger;b<64990?(s=I(u),t=iO(s),s=t in s,s=Z3(s)):(s=n[c++],c+=s);}else if(b<65026){debugger;s=wK(t);t=uo(u);s=t&s;u=LG(s);}else{debugger;s=me1(u);t=n[c++];!s?(c+=t,a=Jx(s)):P=10;}}else b<65132?b<65078?(s=aF(u),t=bn1(a),s=t%s,r=Si1(s)):t=nZ([]):(r=vz(u),s=Q(typeof r));}else if(b<66766){debugger;if(b<66105){debugger;b<65570?b<65504?b<65289?b<65164?(i=m,s=yp(m)):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,RV()):a.splice(0,0,wr());}s=kg(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():b<65290?t=vi([]):(s=zQ(u),t=B8(a),s=t^s,r=ea(s)):b<65527?b<65505?(s=mw(t),t=I6(s),s=t/s,u=iw(s)):(s=Ei1(h),t=Zy(_),s=t|s,v=Lp(s)):b<65528?function(){s=e.CFf;for(t=0;t<s;t++)P=MW(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=Eg(t),t=M8(u),u=rf(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=Pp(u)):b<65921?b<65714?b<65571?(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=ZL(s)):(s=Bd(t),t=l5(a),s=t instanceof s,_=XZ(s)):b<65715?function(){s=P01(t);t=fY(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=_c(s);}():(r=gs(u),s=Nb1(typeof r)):b<66064?b<65922?function(){w=[];u=n[c++];t=aH(t);for(s in 
t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():(s=$I(t),t=kt(u),u=WU(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=Wy(u)):(s=Ae1(u),t=BY(s),s=t in s,s=d7(s));}else if(b<66431){debugger;b<66300?b<66233?b<66106?(s=vX(P),u=n[c++],s[constantPool[u]]+=1):(s=n[c++],t=K5(constantPool[s])):b<66234?(s=Xi(u),t=Ik(a),s=t>>s,r=k1(s)):s=VQ(t):b<66395?b<66301?(r=fC(u),a=Nv(!r)):function(){debugger;}():(s=V4(r),t=xt(h),s=t*s,_=JI(s));}else if(b<66598){debugger;if(b<66564){debugger;if(b<66432){debugger;v=e.CFf;for(s=0;s<v;s++){_=VC(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=bP(t);t=iu(s);s=t+s;s=TC(s);}}else b<66565?(r=j2(u),a=GX(!r)):function(){s=KZ(t);throw s;}();}else if(b<66753){debugger;if(b<66599){debugger;s=T1(u);t=eX(a);s=t>>s;r=kK(s);}else{debugger;s=n11(r);t=n[c++];s?(c+=t,_=wH(s)):P=10;}}else{debugger;r=AZ(s);t=m5(void r);}}else if(b<67609){debugger;if(b<67243){debugger;if(b<66944){debugger;if(b<66929){debugger;if(b<66767){debugger;s=_F(h);t=n[c++];!s?c+=t:P=10;}else{debugger;s=TK(m);t=Wp(s);s=t<=s;r=yf(s);}}else b<66930?(s=Ju(u),t=WP(u),s=t>s,t=DV(s)):(s=Ar(h),t=Sx(s),s=t>>>s,t=rS(s));}else b<67228?b<66945?(s=n[c++],c+=s):t=Xt([]):function(){s=pA(t);t=l7(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=AU(s);}();}else 
b<67357?b<67318?b<67244?(s=bx(t),t=Of(s),s=t-s,s=u01(s)):s=z8({}):b<67319?(s=jq(t),t=fe1(s),s=t/s,u=U7(s)):(s=yA(h),t=Ce1(_),s=t|s,v=Dq(s)):b<67474?b<67358?(s=jw(r),t=BQ(P),s=t<s,h=ua(s)):(s=qw(),t=wq(),s=t!=s,hU(s)):(s=nB(u),t=Gx(s),u=delete t[s],s=Gl(u));}else if(b<67996){debugger;if(b<67796){debugger;if(b<67677){debugger;if(b<67610){debugger;s=dX(r);t=n[c++];s?(c+=t,_=jJ(s)):P=10;}else{debugger;s=hn({});}}else b<67678?function(){s=i01(t);throw s;}():(s=ib1(u),u=n[c++],s[constantPool[u]]-=1);}else b<67990?b<67797?(s=_i1(u),t=w11(a),s=t>>s,r=Kq(s)):(s=G_(a),t=eB(r),s=t-s,h=j7(s)):(s=lQ(t),t=e1(s),s=t===s,u=Im(s));}else if(b<68130){debugger;b<68094?b<67997?(s=_4(r),t=Hn(u),u=vi1(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):(g=J31(s),p=Cg(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):b<68095?(s=V_(h),t=Cb(s),s=t>>>s,t=bu(s)):(r=MK(t),t=H4(-r));}else if(b<68328){debugger;b<68131?(s=$m(t),t=S9(u),s=t&s,u=O31(s)):(s=Gi(u),t=Yb1(a),s=t^s,r=Oi1(s));}else{debugger;s=f0(u);t=n[c++];!s?(c+=t,a=bT(s)):P=10;}}else if(b<70962){debugger;if(b<69917){debugger;if(b<69177){debugger;if(b<68800){debugger;b<68577?b<68527?b<68465?(s=X3(h),t=FA(s),s=t>>>s,t=q8(s)):function(){s=n[c++];u=zW(r);z=[];for(t=0;t<s;t++){z.splice(0,0,BB(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=uf(a);}():b<68528?(s=lu(r),t=ku(h),s=t*s,_=dK(s)):(s=a11(a),t=J3(r),s=t-s,h=vg(s)):b<68728?b<68578?t=uC([]):t=b7([]):b<68729?(r=Pm(t),t=L11(-r)):function(){debugger;}();}else if(b<69076){debugger;if(b<68935){debugger;if(b<68801){debugger;s=Pk(u);t=dN(a);s=t>>s;r=_P(s);}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return 
h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else if(b<68936){debugger;s=SC(u);t=n[c++];!s?(c+=t,a=ic(s)):P=10;}else{debugger;r=mK(u);s=DA(~r);}}else if(b<69160){debugger;b<69077?(s=sT(u),t=bg(s),u=delete t[s],s=DZ(u)):function(){s=oe(t);t=oQ(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=ys(s);}();}else{debugger;s=Io(h);t=n[c++];!s?c+=t:P=10;}}else if(b<69635){debugger;if(b<69277){debugger;b<69245?b<69178?(s=WK(r),t=O3(h),s=t!==s,s=LT(s)):(s=Vh(t),t=Up(s),s=t/s,u=K2(s)):b<69246?(s=CB(t),t=Ga(u),s=t&s,u=gS(s)):(s=QF(r),t=h01(h),s=t*s,_=zf(s));}else if(b<69626){debugger;b<69278?s=pP(t):(s=tL(t),t=aD(u),s=t&s,u=Ve(s));}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else if(b<69780){debugger;if(b<69761){debugger;if(b<69636){debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return 
h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}else{debugger;s=pT(u);t=_g(s);t.push(s);s=FR(t);}}else b<69762?(s=SO(u),t=bb1(t),s=t<<s,s=$E(s)):function(){s=Op(t);t=Jf(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=Uv(s);}();}else b<69840?b<69781?function(){s=e.CFf;for(t=0;t<s;t++)P=_B(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=Do(),t=Oy(),s=t!=s,HA(s)):(s=jN(u),t=dz(a),s=t^s,r=qE(s));}else b<70262?b<70022?b<69993?b<69957?b<69918?function(){s=g8(t);throw s;}():(i=m,s=C_(m)):b<69958?(s=O9(t),t=SM(s),s=t===s,u=R$(s)):(r=mx(u),s=e5(~r)):b<70016?b<69994?t=qX([]):(s=ep(P),u=n[c++],s[constantPool[u]]+=1):(s=n7(),t=jc(),s=t!=s,z6(s)):b<70143?b<70088?b<70023?(s=tK(P),u=n[c++],s[constantPool[u]]+=1):function(){s=n[c++];u=p7(r);z=[];for(t=0;t<s;t++){z.splice(0,0,M31(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=yC(a);}():b<70089?function(){s=bd(t);throw s;}():(s=n[c++],t=Pv(u),u=HS(s),t[constantPool[s]]=u):b<70202?b<70144?(s=gI(u),t=Qg(a),s=t%s,r=ZU(s)):(s=n[c++],c+=s):(s=dB(u),t=e_(u),s=t>s,t=X(s)):b<70781?b<70691?b<70515?b<70263?(r=ek(u),s=RN(~r)):(s=Jt(u),t=X7(s),u=delete t[s],s=T8(u)):b<70516?s=pe(t):(s=qU(t),t=xT(s),s=t/s,u=aY(s)):b<70762?b<70692?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,l21()):a.splice(0,0,yB());}s=uU(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(s=n[c++],t=EM(s)):(s=Km(u),t=$R(a),s=t%s,r=nQ(s)):b<70911?b<70846?b<70782?(s=fa(t),u=n[c++],s?c+=u:P=9):(g=Jo(s),p=Jb(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):b<70847?function(){debugger;}():s=_l({}):b<70951?b<70912?(r=vc(s),t=o01(void 
r)):(s=Ci(u),t=Rs(a),s=t%s,r=cz(s)):(s=Ac1(t),t=EF(u),u=cK(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=dn(u));}else if(b<72818){debugger;if(b<72111){debugger;if(b<71654){debugger;b<71461?b<71220?b<70963?(s=cM(u),t=Fp(s),s=t<s,s=fw(s)):(r=H9(s),t=IQ(void r)):b<71221?(s=uw(r),t=m6(h),s=t!==s,s=sB(s)):(i=m,s=Ns(m)):b<71511?b<71462?(s=re(t),t=fz(s),s=t===s,u=VR(s)):(s=U_(h),t=EK(s),s=t>>>s,t=i4(s)):b<71512?(s=aq(u),t=TQ(a),u=b4(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=gz(t),t=Sk(s),s=t==s,a=v3(s));}else if(b<71859){debugger;if(b<71764){debugger;b<71655?t=ti([]):function(){s=W9(t);t=dH(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=tt(s);}();}else if(b<71765){debugger;return;}else{debugger;s=h7(r);t=O8(u);u=Pb1(s);u.variablePool!=undefined?o(u,t,s):u[t]=s;}}else b<72037?b<71860?function(){w=[];u=n[c++];t=j3(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():(s=Yd(r),t=mN(h),s=t!==s,s=HH(s)):(s=d21(t),t=Ss(s),s=t/s,u=IV(s));}else if(b<72379){debugger;if(b<72233){debugger;b<72206?b<72112?(s=lY(r),t=Bo(P),s=t<s,h=yn1(s)):(s=ln(h),t=vV(s),s=t>>>s,t=Ny(s)):b<72207?(s=Q4(P),u=n[c++],s[constantPool[u]]+=1):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,Bi()):a.splice(0,0,DE());}s=uG(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}();}else if(b<72297){debugger;if(b<72234){debugger;(function(){s=n[c++];u=KW(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Uy(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=qa(a);})();}else{debugger;s=fB(h);t=n[c++];!s?c+=t:P=10;}}else{debugger;s=nH(t);t=un(s);s=t===s;u=Ge1(s);}}else if(b<72704){debugger;if(b<72483){debugger;if(b<72380){debugger;return;}else{debugger;s=ft(u);t=k6(t);s=t<<s;s=dU(s);}}else b<72484?t=_j(d):(s=Rc1(P),u=n[c++],s[constantPool[u]]+=1);}else 
if(b<72746){debugger;b<72705?(s=iX(m),t=A$(s),s=t<=s,r=kY(s)):function(){s=e.CFf;for(t=0;t<s;t++)P=Ly(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}();}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else if(b<73437){debugger;b<73183?b<72973?b<72847?b<72819?(s=ok(t),u=n[c++],s?c+=u:P=9):(s=eb(u),t=jl(s),t.push(s),s=s8(t)):b<72848?(s=Be(u),t=PK(a),s=t^s,r=a_(s)):(s=eA(t),t=xx(a),s=t instanceof s,_=UI(s)):b<73085?b<72974?(s=n[c++],t=d0(constantPool[s])):(s=hO(u),t=Gz(u),s=t>s,t=Pn1(s)):function(){s=bV(t);t=Ch(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=Gw(s);}():b<73230?b<73195?b<73184?(s=gi1(u),t=AP(a),s=t^s,r=Og(s)):(r=YI(u),s=$b(typeof r)):b<73196?t=uQ(d):(s=sW(u),u=n[c++],s[constantPool[u]]-=1):b<73419?b<73231?(s=rc(u),t=O2(s),u=delete t[s],s=Gg(u)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=eR(r);z=[];for(t=0;t<s;t++){z.splice(0,0,ow(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=P$(a);}():(s=Yw(r),t=FY(u),u=Hq(s),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else if(b<73805){debugger;if(b<73684){debugger;if(b<73541){debugger;if(b<73438){debugger;(function(){w=[];u=n[c++];t=VB(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;})();}else{debugger;s=W7(r);t=n[c++];s?(c+=t,_=ZP(s)):P=10;}}else b<73542?(s=Aw(t),t=GT(s),s=t===s,u=T6(s)):(i=m,s=_7(m));}else b<73795?b<73685?function(){debugger;}():(s=qm(u),t=jB(s),s=t<s,s=a$(s)):(r=ip(s),t=nl(void r));}else b<73992?b<73857?b<73806?function(){s=ir(t);throw 
s;}():(s=n[c++],t=Js(constantPool[s])):b<73858?function(){s=n[c++];u=No(r);z=[];for(t=0;t<s;t++){z.splice(0,0,yq(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=Mn(a);}():(s=hJ(u),t=s6(a),u=pt(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<74025?b<73993?(s=J11(r),t=$O(P),s=t<s,h=Xk(s)):(r=x31(s),t=BG(void r)):(s=an(r),t=x11(h),s=t!==s,s=X4(s));}else if(b<87329){debugger;if(b<80809){debugger;if(b<77384){debugger;if(b<75945){debugger;if(b<75294){debugger;if(b<74768){debugger;b<74691?b<74372?b<74240?(s=n[c++],t=k7(s)):s=w5({}):b<74373?(s=o8(a),t=Bm(r),s=t-s,h=R0(s)):(s=vB(u),t=Kt(a),s=t%s,r=_q(s)):b<74708?b<74692?(s=$L(t),t=T11(s),s=t===s,u=sV(s)):(r=aj(u),s=R1(typeof r)):b<74709?s=Tb1(t):(s=Xz(u),t=oZ(a),s=t%s,r=yI(s));}else if(b<75250){debugger;b<75137?b<74769?t=$Q([]):(s=GD(h),t=gd(s),s=t>>>s,t=Vz(s)):b<75138?(s=m31(t),t=cy(s),s=t===s,u=CG(s)):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,y5()):a.splice(0,0,S6());}s=Hd(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}();}else if(b<75260){debugger;b<75251?(s=n[c++],t=u7(s)):(s=G7(u),t=E8(s),u=delete t[s],s=B6(u));}else{debugger;s=dR(u);t=n[c++];!s?(c+=t,a=i1(s)):P=10;}}else if(b<75646){debugger;if(b<75506){debugger;if(b<75384){debugger;if(b<75295){debugger;t=DD([]);}else{debugger;s=o9(u);t=n[c++];!s?(c+=t,a=YW(s)):P=10;}}else b<75385?(s=iD(u),t=N8(a),s=t%s,r=d3(s)):(s=n[c++],t=Vi(u),u=Mk(s),t[constantPool[s]]=u);}else b<75588?b<75507?function(){s=n[c++];u=P8(r);z=[];for(t=0;t<s;t++){z.splice(0,0,k11(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=s11(a);}():function(){w=[];u=n[c++];t=sH(t);for(s in 
t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():(s=_1(t),t=Er(s),s=t/s,u=Pi1(s));}else b<75779?b<75694?b<75647?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,Hc()):a.splice(0,0,p11());}s=zV(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():function(){s=n[c++];u=tj(r);z=[];for(t=0;t<s;t++){z.splice(0,0,sa(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=Uu(a);}():b<75695?(s=r3(a),t=vo(s),s=t>=s,t=b9(s)):(s=g3(u),t=xK(a),s=t^s,r=iI(s)):b<75905?b<75780?function(){w=[];u=n[c++];t=hC(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():(s=n[c++],t=i0(constantPool[s])):function(){s=_31(t);throw s;}();}else b<76689?b<76200?b<76105?b<75987?b<75946?(s=n[c++],t=hk(s)):(s=pB(t),t=Wt(s),s=t===s,u=wY(s)):b<75988?(s=n[c++],t=a9(u),u=Bw(s),t[constantPool[s]]=u):(s=kv(r),t=Sc(P),s=t<s,h=yi(s)):b<76171?b<76106?function(){debugger;}():function(){s=e.CFf;for(t=0;t<s;t++)P=SQ(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():b<76172?(s=ov(u),t=tM(t),s=t<<s,s=bC(s)):(s=vZ(t),t=uX(s),s=t===s,u=Mj(s)):b<76386?b<76267?b<76201?(r=r21(t),t=JJ(-r)):(g=Cz(s),p=zS(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):b<76268?(s=vH(t),t=TO(s),s=t/s,u=UC(s)):function(){w=[];u=n[c++];t=A9(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():b<76626?b<76387?(g=eh(s),p=Ez(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(r=KK(t),t=$w(-r)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=vG(s)):b<77114?b<76919?b<76847?b<76690?function(){s=e.CFf;for(t=0;t<s;t++)P=$3(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=VJ(t),u=n[c++],s?c+=u:P=9):b<76848?t=tC([]):(s=n[c++],t=n[c++],s=new 
RegExp(constantPool[s],constantPool[t]),a=yF(s)):b<77048?b<76920?(s=Yg(h),t=Hb1(_),s=t|s,v=S$(s)):t=s31(d):(s=eY(r),t=oy(u),u=Sn1(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<77298?b<77208?b<77115?(s=xo(h),t=Yb(_),s=t|s,v=dV(s)):function(){s=e.CFf;for(t=0;t<s;t++)P=s21(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():b<77209?(s=TL(h),t=bR(s),s=t>>>s,t=$11(s)):(s=zP(a),t=lt(r),s=t-s,h=Kn(s)):b<77309?b<77299?(r=KH(t),t=ba(-r)):(s=S_(t),t=H11(a),s=t instanceof s,_=EA(s)):(r=eF(s),t=wn(void r));}else if(b<79184){debugger;if(b<78465){debugger;if(b<77854){debugger;if(b<77741){debugger;b<77503?b<77385?(r=Lc(t),t=a0(-r)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=u21(r);z=[];for(t=0;t<s;t++){z.splice(0,0,V11(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=zA(a);}():b<77504?(s=D1(u),t=pZ(u),s=t>s,t=qV(s)):(g=jt(s),p=Ls(t),p.variablePool!=undefined?o(p,g,_):p[g]=s);}else if(b<77822){debugger;b<77742?(s=yW(t),t=oa(s),s=t+s,s=de1(s)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=A7(s));}else if(b<77823){debugger;s=qc(a);t=wC(r);s=t-s;h=Bq(s);}else{debugger;s=rk(u);t=n[c++];!s?(c+=t,a=EU(s)):P=10;}}else b<78161?b<77952?b<77855?(s=JZ(m),t=cd(s),s=t<=s,r=KD(s)):(s=qB(u),t=x$(a),s=t>>s,r=Ce(s)):b<77953?(s=B2(u),t=ii(s),t.push(s),s=Yc(t)):(s=QQ(u),u=n[c++],s[constantPool[u]]-=1):b<78298?b<78162?(s=s7(u),t=Ps(s),s=t in s,s=WM(s)):(s=Te1(u),u=n[c++],s[constantPool[u]]-=1):(s=D6(a),t=e4(s),s=t>=s,t=Cm(s));}else if(b<78762){debugger;if(b<78668){debugger;b<78645?b<78466?(s=ci(t),t=f7(s),s=t==s,a=pi(s)):(s=Dc1(r),t=zl(P),s=t<s,h=qS(s)):b<78646?(s=TA(t),u=n[c++],s?c+=u:P=9):(s=n[c++],t=Iu(constantPool[s]));}else if(b<78756){debugger;if(b<78669){debugger;s=Eh(u);t=jx(s);s=t<s;s=Cn1(s);}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=Z5(s);let c=_;i[c]=function(){let e=new cshduei();var 
b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else{debugger;s=X21(t);u=n[c++];s?c+=u:P=9;}}else b<79078?b<78943?b<78763?(i=m,s=I_(m)):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=z$(s)):b<78944?(s=uk(t),t=Qp(s),s=t==s,a=mT(s)):function(){s=n[c++];u=m$(r);z=[];for(t=0;t<s;t++){z.splice(0,0,wz(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=W$(a);}():b<79177?b<79079?(s=pl(t),t=U8(s),s=t==s,a=JS(s)):(s=jA(h),t=Lh(s),s=t>>>s,t=tr(s)):(r=vk(u),s=GI(~r));}else if(b<80323){debugger;if(b<79833){debugger;if(b<79408){debugger;b<79284?b<79185?function(){s=mu(t);t=ic1(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=Aj(s);}():(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=tc1(s)):b<79285?(r=e0(u),s=Jz(~r)):(r=wk(u),a=eQ(!r));}else if(b<79807){debugger;b<79409?(s=dJ(t),t=Ji1(u),s=t&s,u=s1(s)):(s=fm(P),u=n[c++],s[constantPool[u]]+=1);}else{debugger;s=sb1(h);t=n[c++];!s?c+=t:P=10;}}else if(b<80186){debugger;if(b<80101){debugger;if(b<79834){debugger;return;}else{debugger;s=n[c++];t=n[c++];s=new RegExp(constantPool[s],constantPool[t]);a=__(s);}}else b<80102?(s=YN(h),t=lf(_),s=t|s,v=IN(s)):(s=n[c++],t=n[c++],s=new 
RegExp(constantPool[s],constantPool[t]),a=la(s));}else if(b<80271){debugger;if(b<80187){debugger;s=EP(r);t=n[c++];s?(c+=t,_=Rb1(s)):P=10;}else{debugger;(function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,c6()):a.splice(0,0,Rv());}s=Mx(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}})();}}else{debugger;s=Eo(u);u=n[c++];s[constantPool[u]]-=1;}}else if(b<80637){debugger;if(b<80521){debugger;if(b<80337){debugger;if(b<80324){debugger;s=Kx(h);t=n[c++];!s?c+=t:P=10;}else{debugger;s=$e1(r);t=UZ(P);s=t<s;h=W4(s);}}else b<80338?(s=n31(t),t=_C(s),s=t===s,u=nT(s)):function(){s=oS(t);t=AY(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=zG(s);}();}else b<80580?b<80522?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=ml(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Mb1(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=Mv(a);}():(s=vu(t),t=Yr(a),s=t instanceof s,_=C31(s)):(s=U(t),t=ne1(s),s=t==s,a=Oz(s));}else b<80749?b<80675?b<80638?(s=ll(u),t=bU(s),s=t<s,s=jj(s)):(r=t3(t),t=Pc1(-r)):b<80676?(s=mn(),t=Ui1(),s=t!=s,Z11(s)):(s=L9(u),t=u2(s),s=t in s,s=dA(s)):b<80804?b<80750?t=yJ([]):(s=XV(u),t=xi(a),s=t>>s,r=oe1(s)):(s=Ro(t),t=Vj(a),s=t instanceof s,_=Y(s));}else if(b<84006){debugger;if(b<82665){debugger;if(b<81661){debugger;if(b<81278){debugger;if(b<81057){debugger;if(b<80906){debugger;if(b<80810){debugger;r=cT(u);s=Tn1(typeof r);}else{debugger;s=da(u);t=n[c++];!s?(c+=t,a=Eq(s)):P=10;}}else b<80907?(s=Qs(a),t=NW(r),s=t-s,h=EC(s)):(s=Qt(u),t=zD(a),s=t^s,r=Lr(s));}else b<81230?b<81058?(r=Wn1(t),t=rZ(-r)):(s=NR(t),u=n[c++],s?c+=u:P=9):b<81231?(s=y7(u),t=UM(a),s=t>>s,r=c$(s)):(s=kX(r),t=cn1(u),u=aU(s),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else b<81474?b<81405?b<81279?(s=ke1(u),t=tE(a),s=t>>s,r=ji(s)):(s=qF(),t=U4(),s=t!=s,I11(s)):b<81406?(s=P2(t),t=F$(a),s=t instanceof s,_=GR(s)):(s=o21(u),t=ER(s),t.push(s),s=QR(t)):b<81519?b<81475?(s=rn(u),t=nN(s),s=t in 
s,s=eT(s)):(s=Dk(u),t=cH(t),s=t<<s,s=ZR(s)):(s=sg(r),t=Kf(P),s=t<s,h=x2(s));}else if(b<82255){debugger;if(b<82035){debugger;if(b<82020){debugger;if(b<81662){debugger;s=be(m);t=m11(s);s=t<=s;r=Y11(s);}else{debugger;s=U5(u);t=n[c++];!s?(c+=t,a=SL(s)):P=10;}}else b<82021?function(){s=n[c++];u=n6(r);z=[];for(t=0;t<s;t++){z.splice(0,0,My(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=dp(a);}():(s=G5(t),t=rK(u),u=ca(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=xL(u));}else b<82114?b<82036?(s=aA(P),u=n[c++],s[constantPool[u]]+=1):(r=s_(u),s=SJ(~r)):(s=aI(t),t=Td(u),u=l01(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=Ze(u));}else b<82535?b<82420?b<82256?(s=nL(t),t=Xj(s),s=t/s,u=OQ(s)):(i=m,s=Db1(m)):b<82421?(s=n[c++],t=yP(s)):t=C9(d):b<82625?b<82536?(s=M_(u),t=XK(s),u=delete t[s],s=fu(u)):(s=Iw(r),t=FH(h),s=t*s,_=ZH(s)):(s=TI(t),t=NI(a),s=t instanceof s,_=Yi(s));}else if(b<83379){debugger;if(b<83078){debugger;if(b<82888){debugger;if(b<82781){debugger;b<82666?(s=QS(),t=HZ(),s=t!=s,gJ(s)):(s=rE(r),t=x4(u),u=pK(s),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else if(b<82782){debugger;s=d5(r);t=oG(u);u=Dv(s);u.variablePool!=undefined?o(u,t,s):u[t]=s;}else{debugger;return;}}else if(b<82996){debugger;if(b<82889){debugger;s=zU(h);t=n[c++];!s?c+=t:P=10;}else{debugger;t=IK([]);}}else{debugger;s=NF(u);u=n[c++];s[constantPool[u]]-=1;}}else b<83184?b<83124?b<83079?(s=n[c++],t=HE(s)):(s=He1(h),t=Zk(_),s=t|s,v=W(s)):b<83125?(g=go(s),p=r7(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(s=C(a),t=Q31(r),s=t-s,h=t$(s)):b<83361?b<83185?(r=Z7(s),t=te(void r)):(s=kV(u),t=_5(s),s=t<s,s=g5(s)):(s=_2(t),t=vm(s),s=t-s,s=KU(s));}else if(b<83510){debugger;if(b<83423){debugger;if(b<83395){debugger;b<83380?(s=Uf(t),t=ta(a),s=t instanceof s,_=b2(s)):(s=yl(u),t=An1(s),t.push(s),s=ni1(t));}else 
if(b<83396){debugger;s=ck(u);u=n[c++];s[constantPool[u]]-=1;}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else if(b<83504){debugger;b<83424?(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=RM(s)):(s=er(a),t=iZ(s),s=t>=s,t=yN(s));}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=AJ(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else b<83795?b<83735?b<83511?(s=jV(u),t=qY(a),s=t%s,r=xJ(s)):(s=ZO(u),t=N1(s),s=t<s,s=e31(s)):b<83736?s=ST(t):(r=Se(u),s=Qn1(~r)):b<83987?b<83796?(r=eD(s),t=VK(void r)):(s=n[c++],t=NH(constantPool[s])):(s=n[c++],t=VF(u),u=sp(s),t[constantPool[s]]=u);}else 
if(b<85162){debugger;if(b<84586){debugger;if(b<84410){debugger;b<84224?b<84079?b<84007?(g=fP(s),p=ge(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(s=n[c++],c+=s):b<84080?(s=T31(),t=rx(),s=t!=s,fp(s)):(s=Z1(t),t=Gq(u),s=t&s,u=O6(s)):b<84339?b<84225?(s=sR(r),t=xg(h),s=t!==s,s=Hl(s)):(s=oW(),t=rR(),s=t!=s,Gn(s)):b<84340?(s=Eb(u),u=n[c++],s[constantPool[u]]-=1):(r=q21(t),t=u9(-r));}else if(b<84514){debugger;if(b<84483){debugger;if(b<84411){debugger;r=Iy(u);s=t_(~r);}else{debugger;s=Gh(r);t=n[c++];s?(c+=t,_=Pw(s)):P=10;}}else b<84484?(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=k2(s)):(s=QX(a),t=Ei(s),s=t>=s,t=Ho(s));}else b<84554?b<84515?(g=Ii1(s),p=M2(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(s=yr(u),t=fo(s),s=t<s,s=bE(s)):(s=tZ(h),t=vY(_),s=t|s,v=Od(s));}else if(b<84976){debugger;if(b<84671){debugger;if(b<84633){debugger;if(b<84587){debugger;s=Re(u);u=n[c++];s[constantPool[u]]-=1;}else{debugger;s=$h(u);t=n[c++];!s?(c+=t,a=es(s)):P=10;}}else b<84634?(s=WS(t),t=rI(u),s=t&s,u=hP(s)):(r=B_(t),t=Tc(-r));}else b<84965?b<84672?(s=VN(t),t=YZ(s),s=t-s,s=dy(s)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=cI(r);z=[];for(t=0;t<s;t++){z.splice(0,0,OL(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=BS(a);}():(r=jO(t),t=iS(-r));}else if(b<85059){debugger;b<85038?b<84977?s=Gd(t):(r=ID(u),s=ap(typeof r)):b<85039?(s=mS(t),t=VP(u),u=m1(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=st(u)):(s=_X(t),t=m8(u),u=LD(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=Hf(u));}else if(b<85140){debugger;if(b<85060){debugger;s=dv(a);t=Es(r);s=t-s;h=qn1(s);}else{debugger;s=wu(r);t=n[c++];s?(c+=t,_=H_(s)):P=10;}}else{debugger;r=Ou(s);t=cP(void r);}}else if(b<86147){debugger;if(b<85833){debugger;b<85301?b<85268?b<85163?s=oF({}):s=nd(t):b<85269?t=a4([]):(r=M9(t),t=DW(-r)):b<85359?b<85302?(s=Hu(u),t=ij(s),s=t in s,s=Vx(s)):(s=NS(P),u=n[c++],s[constantPool[u]]+=1):t=G1([]);}else 
if(b<85936){debugger;if(b<85883){debugger;b<85834?(s=_11(u),t=ws(s),u=delete t[s],s=IA(u)):(s=L21(t),t=s01(s),s=t-s,s=zL(s));}else if(b<85884){debugger;s=PH(r);t=n[c++];s?(c+=t,_=a8(s)):P=10;}else{debugger;s=JA(t);t=Gv(s);s=t/s;u=E21(s);}}else b<86081?b<85937?(s=R(u),t=he1(s),s=t<s,s=NG(s)):t=w21(d):(r=A8(u),s=XA(~r));}else if(b<86555){debugger;if(b<86327){debugger;if(b<86265){debugger;b<86148?(s=n[c++],c+=s):(s=hX(u),t=hc1(a),s=t%s,r=fZ(s));}else if(b<86266){debugger;s=x_(u);t=n[c++];!s?(c+=t,a=l$(s)):P=10;}else{debugger;s=TW(r);t=ei(h);s=t*s;_=VX(s);}}else b<86487?b<86328?function(){s=k0(t);throw s;}():function(){s=e.CFf;for(t=0;t<s;t++)P=l0(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=e$(),t=YX(),s=t!=s,cQ(s));}else if(b<86955){debugger;if(b<86887){debugger;if(b<86556){debugger;s=cR(r);t=n[c++];s?(c+=t,_=xU(s)):P=10;}else{debugger;s=n[c++];t=u31(constantPool[s]);}}else b<86888?(s=zN(u),t=vU(a),s=t^s,r=tz(s)):function(){s=$7(t);t=ds(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=F4(s);}();}else if(b<87261){debugger;if(b<86956){debugger;r=lT(u);s=fH(~r);}else{debugger;s=rX(r);t=n[c++];s?(c+=t,_=ga(s)):P=10;}}else{debugger;s=n[c++];t=y_(constantPool[s]);}}else if(b<93666){debugger;if(b<90046){debugger;if(b<88479){debugger;if(b<88012){debugger;if(b<87757){debugger;if(b<87580){debugger;b<87419?b<87330?(s=VW(u),t=pe1(u),s=t>s,t=Gi1(s)):(r=X11(u),s=Kr(typeof r)):b<87420?(s=V9(a),t=l9(s),s=t>=s,t=$e(s)):(s=ww(t),t=WW(u),u=gl(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=Ui(u));}else if(b<87644){debugger;b<87581?(i=m,s=hB(m)):t=MG(d);}else if(b<87645){debugger;s=fn1(u);t=n[c++];!s?(c+=t,a=Dg(s)):P=10;}else{debugger;s=v_(u);t=JG(s);u=delete t[s];s=h4(u);}}else if(b<87846){debugger;b<87829?b<87758?(s=yT(t),t=iK(s),s=t-s,s=y1(s)):(s=Ob1(r),t=sm(h),s=t!==s,s=gu(s)):b<87830?(s=FM(r),t=eW(P),s=t<s,h=Aq(s)):(s=J8(u),t=CZ(a),s=t%s,r=nS(s));}else 
if(b<87984){debugger;if(b<87847){debugger;s=Z4(r);t=Vb1(h);s=t!==s;s=vb(s);}else{debugger;s=Me1(h);t=n[c++];!s?c+=t:P=10;}}else{debugger;r=Gk(u);a=k3(!r);}}else if(b<88336){debugger;b<88224?b<88184?b<88013?(s=TV(r),t=Dc(P),s=t<s,h=f21(s)):(s=OD(u),u=n[c++],s[constantPool[u]]-=1):b<88185?(s=pS(u),u=n[c++],s[constantPool[u]]-=1):(s=Wf(u),t=HI(s),t.push(s),s=Fi(t)):b<88288?b<88225?(s=vC(u),t=Qw(s),s=t in s,s=aK(s)):(s=Nq(h),t=X31(s),s=t>>>s,t=jh(s)):(s=sk(h),t=RD(s),s=t>>>s,t=B3(s));}else if(b<88444){debugger;b<88381?b<88337?(s=tx(a),t=_a(r),s=t-s,h=Su(s)):(s=SP(u),t=wa(s),u=delete t[s],s=Zv(u)):b<88382?(s=hM(m),t=fM(s),s=t<=s,r=Zt(s)):(s=WR(u),t=Ej(s),u=delete t[s],s=ja(u));}else if(b<88459){debugger;b<88445?s=xB({}):(s=nk(t),t=qJ(s),s=t==s,a=pF(s));}else{debugger;s=R11(u);t=n[c++];!s?(c+=t,a=sP(s)):P=10;}}else if(b<89209){debugger;if(b<88925){debugger;if(b<88868){debugger;if(b<88610){debugger;b<88480?function(){s=YJ(t);throw s;}():(s=c0(a),t=pc1(s),s=t>=s,t=cb1(s));}else if(b<88611){debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}else{debugger;s=l6(u);t=ZQ(s);s=t in s;s=Th(s);}}else if(b<88915){debugger;b<88869?(s=QP(u),t=IX(a),s=t^s,r=I2(s)):(s=wn1(u),u=n[c++],s[constantPool[u]]-=1);}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=FF(s);let c=_;i[c]=function(){let e=new cshduei();var 
b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else b<89146?b<89013?b<88926?function(){s=ot(t);throw s;}():(s=wo(r),t=Qz(h),s=t*s,_=qz(s)):b<89014?(s=CO(t),u=n[c++],s?c+=u:P=9):(g=q5(s),p=ey(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):b<89187?b<89147?(s=K7(h),t=ET(s),s=t>>>s,t=B11(s)):s=Qn({}):(g=oT(s),p=bH(t),p.variablePool!=undefined?o(p,g,_):p[g]=s);}else if(b<89802){debugger;if(b<89554){debugger;b<89349?b<89210?(s=Bj(u),t=uZ(s),s=t in s,s=PQ(s)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=Ic(r);z=[];for(t=0;t<s;t++){z.splice(0,0,O_(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=xz(a);}():b<89350?(s=Va(u),t=N_(a),s=t>>s,r=dc(s)):function(){w=[];u=n[c++];t=Sj(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}();}else if(b<89700){debugger;b<89555?(s=um(t),u=n[c++],s?c+=u:P=9):t=xa([]);}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return 
h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<90020?b<89939?b<89803?(s=_V(a),t=O0(r),s=t-s,h=F3(s)):function(){s=OW(t);t=Ur(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=F6(s);}():b<89940?(r=Du(t),t=BR(-r)):(s=d31(P),u=n[c++],s[constantPool[u]]+=1):b<90037?b<90021?s=Tv({}):(s=JN(u),t=Mn1(a),u=Fx(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(r=kQ(u),s=m4(typeof r));}else if(b<91929){debugger;if(b<91095){debugger;if(b<90428){debugger;if(b<90231){debugger;b<90138?b<90047?function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=mp(r);z=[];for(t=0;t<s;t++){z.splice(0,0,a6(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=ze1(a);}():(r=Li(u),a=BX(!r)):b<90139?(s=EZ(u),t=Xn(a),u=R4(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=HW(P),u=n[c++],s[constantPool[u]]+=1);}else if(b<90317){debugger;b<90232?(s=RP(),t=oo(),s=t!=s,CN(s)):(s=wZ(t),t=rw(a),s=t instanceof s,_=ki1(s));}else if(b<90318){debugger;s=c21(r);t=lm(h);s=t*s;_=wM(s);}else{debugger;s=US(r);t=n[c++];s?(c+=t,_=zd(s)):P=10;}}else if(b<90563){debugger;b<90444?b<90429?(s=A0(r),t=Gb1(P),s=t<s,h=Hw(s)):(s=Rw(m),t=Bs(s),s=t<=s,r=ci1(s)):b<90445?(s=vP(t),t=ZT(a),s=t instanceof s,_=xc(s)):function(){s=n[c++];u=Yh(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Fh(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=F_(a);}();}else if(b<91086){debugger;if(b<90564){debugger;s=C6();t=uI();s=t!=s;sS(s);}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=Ll(s);let c=_;i[c]=function(){let e=new cshduei();var 
b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else{debugger;s=gQ(a);t=vc1(r);s=t-s;h=fv(s);}}else if(b<91512){debugger;if(b<91239){debugger;if(b<91212){debugger;if(b<91096){debugger;(function(){s=gx(t);throw s;})();}else{debugger;s=sN(h);t=n[c++];!s?c+=t:P=10;}}else if(b<91213){debugger;s=f5(u);t=n[c++];!s?(c+=t,a=mV(s)):P=10;}else{debugger;s=jf(h);t=n[c++];!s?c+=t:P=10;}}else b<91397?b<91240?(s=x5(t),t=hw(a),s=t instanceof s,_=nK(s)):function(){s=NJ(t);t=$W(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=CQ(s);}():(s=N6(u),u=n[c++],s[constantPool[u]]-=1);}else b<91629?b<91601?b<91513?function(){s=G(t);throw s;}():function(){debugger;}():b<91602?function(){debugger;}():s=ma(t):b<91866?b<91630?(s=n8(P),u=n[c++],s[constantPool[u]]+=1):(s=Al(t),t=dj(s),s=t==s,a=b8(s)):function(){s=f$(t);throw s;}();}else if(b<92819){debugger;if(b<92317){debugger;if(b<92102){debugger;if(b<92032){debugger;b<91930?(s=Ye1(h),t=no(s),s=t>>>s,t=Hx(s)):t=Oo([]);}else if(b<92033){debugger;s=Dn1(a);t=ue(s);s=t>=s;t=mM(s);}else{debugger;return;}}else b<92258?b<92103?function(){s=yR(t);t=an1(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=AK(s);}():(s=n[c++],t=o5(constantPool[s])):(s=yZ(u),t=hL(a),u=FL(r),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else 
if(b<92620){debugger;if(b<92519){debugger;if(b<92318){debugger;s=n[c++];t=n[c++];s=new RegExp(constantPool[s],constantPool[t]);a=HQ(s);}else{debugger;return;}}else if(b<92520){debugger;s=n[c++];c+=s;}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<92769?b<92621?(r=Xa(u),a=$(!r)):(s=je(t),t=rL(u),u=gF(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=wb1(u)):(r=mQ(u),s=L8(~r));}else b<93313?b<93214?b<93046?b<92820?(s=j9(u),t=qI(s),s=t<s,s=Y21(s)):(s=vv(h),t=t6(s),s=t>>>s,t=If(s)):b<93047?(s=Jn1(r),t=ZB(h),s=t!==s,s=SD(s)):(s=S21(t),t=nY(s),s=t===s,u=KP(s)):b<93312?b<93215?(s=_n(u),t=JQ(u),s=t>s,t=NE(s)):function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=YU(r);z=[];for(t=0;t<s;t++){z.splice(0,0,eK(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=ub1(a);}():function(){m["for_in_xh_cbb_list"]=w;s=n[c++];u=pg(r);z=[];for(t=0;t<s;t++){z.splice(0,0,rN(h));}offnew=1;u==RegExp?a=new RegExp(z[0],z[1]):a=new u(...z);offnew=0;s=l1(a);}():b<93443?b<93400?b<93314?t=tF([]):(s=X2(a),t=AA(s),s=t>=s,t=en1(s)):b<93401?(g=QB(s),p=d4(t),p.variablePool!=undefined?o(p,g,_):p[g]=s):(s=n[c++],t=Jw(u),u=YS(s),t[constantPool[s]]=u):b<93515?b<93444?(s=yc(t),t=QZ(s),s=t+s,s=h9(s)):(s=Zh(t),t=wi(u),s=t&s,u=tJ(s)):(s=Nk(u),t=WA(a),s=t>>s,r=Yp(s));}else if(b<96357){debugger;if(b<94706){debugger;if(b<94138){debugger;if(b<94014){debugger;if(b<93808){debugger;b<93763?b<93667?function(){w=[];u=n[c++];t=vs(t);for(s in 
t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():(s=$31(r),t=R3(u),u=Y9(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<93764?(s=Z21(t),t=Ae(u),s=t&s,u=M5(s)):(s=L3(r),t=mP(h),s=t!==s,s=lb(s));}else if(b<93868){debugger;b<93809?(s=B5(h),t=ie1(_),s=t|s,v=a3(s)):(s=zR(u),t=K0(s),t.push(s),s=Fw(t));}else if(b<93869){debugger;s=qe(u);t=n[c++];!s?(c+=t,a=F7(s)):P=10;}else{debugger;s=Ty(t);}}else b<94096?b<94029?b<94015?(s=n[c++],t=DL(s)):function(){s=e.CFf;for(t=0;t<s;t++)P=HR(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():b<94030?(r=F(s),t=tH(void r)):function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,bW()):a.splice(0,0,Vb());}s=i_(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():b<94132?b<94097?function(){s=e.CFf;for(t=0;t<s;t++)P=e21(t),l[t]!=undefined?m[P]=l[t]:u=l[t];}():(s=Pf(t),t=z21(s),s=t+s,s=Mg(s)):function(){debugger;}();}else if(b<94546){debugger;if(b<94254){debugger;if(b<94216){debugger;if(b<94139){debugger;s=Cq(r);t=gE(h);s=t*s;_=XQ(s);}else{debugger;s=ud(u);t=n[c++];!s?(c+=t,a=UF(s)):P=10;}}else b<94217?(s=rg(u),t=y6(s),s=t<s,s=qM(s)):(s=Ye(P),u=n[c++],s[constantPool[u]]+=1);}else if(b<94438){debugger;if(b<94255){debugger;v=e.CFf;for(s=0;s<v;s++){_=eE(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let 
b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=Qc1(m);t=WY(s);s=t<=s;r=ve(s);}}else{debugger;s=i2(t);t=mZ(s);s=t+s;s=_p(s);}}else if(b<94608){debugger;b<94600?b<94547?function(){s=n[c++];u=TN(r);z=[];for(t=0;t<s;t++){z.splice(0,0,S5(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=iq(a);}():(s=A21(t),t=p4(u),u=UR(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=fs(u)):b<94601?(s=qo(t),t=pm(s),s=t===s,u=o31(s)):(s=P31(t),t=GS(s),s=t+s,s=dF(s));}else if(b<94628){debugger;b<94609?t=mq(d):(s=XN(u),t=si(a),s=t>>s,r=iJ(s));}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else if(b<95521){debugger;if(b<95129){debugger;b<94925?b<94761?b<94707?(s=JL(t),t=OE(s),s=t+s,s=LA(s)):(s=EB(h),t=kH(_),s=t|s,v=FO(s)):b<94762?s=_t({}):(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=Q7(s)):b<95046?b<94926?function(){s=p3(t);t=pU(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=hA(s);}():(i=m,s=oD(m)):(s=n[c++],c+=s);}else if(b<95340){debugger;b<95280?b<95130?(s=N21(u),t=lN(t),s=t<<s,s=Lf(s)):function(){w=[];u=n[c++];t=pO(t);for(s in t){w.push(s);}m["for_in_xh_cbb_list"+u]=w;}():b<95281?(s=FE(u),t=E4(a),s=t>>s,r=_i(s)):(s=w7(t),t=AI(s),s=t==s,a=xM(s));}else 
if(b<95520){debugger;if(b<95341){debugger;s=c8(h);t=n[c++];!s?c+=t:P=10;}else{debugger;s=rJ(r);t=n[c++];s?(c+=t,_=eO(s)):P=10;}}else{debugger;s=tn(h);t=n[c++];!s?c+=t:P=10;}}else if(b<95837){debugger;b<95785?b<95746?b<95522?function(){s=C0(t);t=gT(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=lH(s);}():(r=iC(u),s=Tf(typeof r)):b<95747?(s=z11(u),t=rV(a),u=B1(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=cA(t),t=RU(s),s=t+s,s=s5(s)):b<95828?b<95786?function(){s=RO(t);t=Yn1(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=Uh(s);}():(s=n[c++],t=KI(s)):(s=UT(t),t=bp(s),s=t-s,s=N31(s));}else if(b<95956){debugger;if(b<95871){debugger;b<95838?(s=Xr(u),t=LM(s),s=t in s,s=tT(s)):(s=Zb1(t),t=hW(s),s=t==s,a=sM(s));}else if(b<95872){debugger;r=hb1(s);t=Db(void r);}else{debugger;v=e.CFf;for(s=0;s<v;s++){_=kI(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}}else if(b<96320){debugger;if(b<95957){debugger;return;}else{debugger;s=DU(t);t=sF(s);s=t+s;s=S(s);}}else{debugger;s=qn(u);t=Pu(a);u=XB(r);u.variablePool!=undefined?o(u,t,s):u[t]=s;}}else if(b<98208){debugger;if(b<96995){debugger;if(b<96796){debugger;if(b<96592){debugger;if(b<96465){debugger;if(b<96358){debugger;s=Xg(h);t=n[c++];!s?c+=t:P=10;}else{debugger;s=AG(t);t=Lc1(a);s=t instanceof s;_=hd(s);}}else 
b<96466?(s=JX(r),t=yX(h),s=t!==s,s=XE(s)):(s=Z$(a),t=w$(s),s=t>=s,t=i11(s));}else if(b<96622){debugger;if(b<96593){debugger;(function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,S4()):a.splice(0,0,Mw());}s=RQ(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}})();}else{debugger;t=n[c++];u=n[c++];a=n[c++];try{h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});c=t+c;if(h=="-90_cbb"){debugger;return h;}}catch(b){_=b;c=t+c;h=cbb_jsvmp(s,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,args:z,argsList:l,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}}finally{if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=u+c;h=cbb_jsvmp(u,c,c,e,z.length,1,{shuz:n,cbbb:m,allthis:d,argsList:l,args:z,duei:e,all:i,a7:_});if(h=="-90_cbb"){debugger;return h;}else{debugger;P=10;}c=c+a;}}}else b<96623?function(){s=n[c++]*2;u=[];a=[];for(t=0;t<s;t++){t<s/2?u.splice(0,0,qQ()):a.splice(0,0,ub());}s=hG(t);for(t=0;t<u.length;t++){if(a[t]==s){debugger;c+=u[t];break;}else if(a[t]==null){debugger;c+=u[t];break;}else{debugger;P=10;}}}():(s=Vs(t),t=YQ(s),s=t-s,s=JC(s));}else if(b<96924){debugger;if(b<96829){debugger;if(b<96797){debugger;s=b01(r);t=n[c++];s?(c+=t,_=Ap(s)):P=10;}else{debugger;g=uv(s);p=bX(t);p.variablePool!=undefined?o(p,g,_):p[g]=s;}}else b<96830?s=wf(t):function(){s=n[c++];u=Oc1(r);z=[];for(t=0;t<s;t++){z.splice(0,0,Kw(h));}let e=false;for(let b of Object.getOwnPropertyNames(window)){if(typeof window[b]==="function"&&u===window[b]){debugger;a=window[b](...z);e=true;break;}}if(!e){debugger;a=u.apply(i,z);}t=ei1(a);}();}else b<96968?b<96925?(s=n[c++],t=CU(constantPool[s])):(r=zk(t),t=kC(-r)):(s=qr(t),u=n[c++],s?c+=u:P=9);}else if(b<97488){debugger;if(b<97177){debugger;b<97096?b<96996?(r=wN(u),s=H1(typeof r)):(s=yM(m),t=MJ(s),s=t<=s,r=Xe(s)):b<97097?(s=MA(h),t=gW(s),s=t>>>s,t=k8(s)):(s=yH(h),t=nJ(_),s=t|s,v=fk(s));}else 
if(b<97341){debugger;if(b<97178){debugger;v=e.CFf;for(s=0;s<v;s++){_=ZD(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=Ph(t);t=It(s);s=t===s;u=Uz(s);}}else{debugger;s=Yk(r);t=EI(h);s=t*s;_=$y(s);}}else b<97890?b<97703?b<97489?s=z7(t):(s=nb1(t),t=X_(s),s=t===s,u=bA(s)):b<97704?(s=n[c++],t=n[c++],s=new RegExp(constantPool[s],constantPool[t]),a=OT(s)):(s=ld(r),t=m3(P),s=t<s,h=rz(s)):b<98177?b<97891?(s=tI(u),t=Wb(u),s=t>s,t=jS(s)):(s=Oh(r),t=gH(u),u=nX(s),u.variablePool!=undefined?o(u,t,s):u[t]=s):(s=LH(t),t=jn1(s),s=t/s,u=zO(s));}else if(b<99029){debugger;if(b<98730){debugger;if(b<98593){debugger;if(b<98393){debugger;b<98209?(s=L$(),t=aE(),s=t!=s,Nl(s)):s=Ry(t);}else if(b<98394){debugger;i=m;s=Ih(m);}else{debugger;s=Ni(h);t=n[c++];!s?c+=t:P=10;}}else b<98663?b<98594?(s=UD(t),t=JU(u),u=UL(a),u.variablePool!=undefined?o(u,t,s):u[t]=s,s=zb(u)):(s=A5(t),t=QN(s),s=t/s,u=Qe1(s)):(i=m,s=fS(m));}else b<98908?b<98860?b<98731?(s=n[c++],c+=s):(s=wU(u),t=Qo(t),s=t<<s,s=DG(s)):b<98861?(s=Sr(t),t=Qa(s),s=t-s,s=fT(s)):(s=JV(u),t=Pd(a),u=UP(r),u.variablePool!=undefined?o(u,t,s):u[t]=s):b<98963?b<98909?(r=gw(u),s=QV(~r)):(s=_e(t),t=V$(s),s=t-s,s=PR(s)):(s=h5(r),t=JH(u),u=Yc1(s),u.variablePool!=undefined?o(u,t,s):u[t]=s);}else 
if(b<99252){debugger;b<99211?b<99178?b<99030?t=CW(d):(s=IR(u),t=Mb(s),s=t<s,s=Ts(s)):b<99179?(s=pv(t),t=KS(u),s=t&s,u=tB(s)):function(){s=q7(t);t=Np(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=jn(s);}():b<99229?b<99212?(s=K31(r),t=QY(h),s=t!==s,s=fn(s)):(s=fd(u),t=z1(s),s=t<s,s=ZS(s)):(s=qq(t),u=n[c++],s?c+=u:P=9);}else if(b<99354){debugger;b<99305?b<99253?function(){s=ko(t);t=Nb(u);try{s=t[s];}catch(b){s=window[s];}i=t;s=Y8(s);}():(s=UQ(t),t=Jj(a),s=t instanceof s,_=YR(s)):b<99306?function(){debugger;}():(s=uj(a),t=rB(s),s=t>=s,t=ae1(s));}else if(b<99732){debugger;if(b<99355){debugger;v=e.CFf;for(s=0;s<v;s++){_=NY(s);let c=_;i[c]=function(){let e=new cshduei();var b=this;if(offnew==1){debugger;offnew=0;P={variablePool:{},arguments:arguments,zhili:[]};P.__proto__=m;cltothis(P.variablePool,changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);return b;}else{debugger;P={variablePool:{},arguments:arguments,zhili:[]};cltothis(P["variablePool"],changlc[m.variablePool[c]].variablePool);cltothis(P,P["variablePool"],1);cltothis(P["zhili"],changlc[m.variablePool[c]].zhili);P.__proto__=m;h=cbb_jsvmp(P,e,0,P["zhili"],arguments,b);}if(e.CFf==0){debugger;return undefined;}else{debugger;let b=e.cf(s);for(;1==1;){if(e.CFf==0){debugger;break;}else{debugger;P=e.cf(s);}}return b;}};}}else{debugger;s=BM(u);t=n[c++];!s?(c+=t,a=c2(s)):P=10;}}else{debugger;s=NL(h);t=n[c++];!s?c+=t:P=10;}}}if(!this.window){debugger;var window={exports:exports,require:require,module:module,__dirname:__dirname,__filename:__filename};window.__proto__=global;}offnew=0;window["variablePool"]={};window["zhili"]=[];cltothis(window["variablePool"],changlc.awcbb_yhh_fun0.variablePool);cltothis(window["zhili"],changlc.awcbb_yhh_fun0.zhili);cbb_jsvmp(window,new cshduei(),0,changlc.awcbb_yhh_fun0.zhili);
27182812/ChatGLM-LLaMA-chinese-insturct
16,664
src/transformers/models/gpt_neox_japanese/tokenization_gpt_neox_japanese.py
# coding=utf-8 # Copyright 2022 ABEJA, Inc. and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tokenization classes for GPTNeoXJapanese.""" import collections import json import os import re from typing import TYPE_CHECKING, List, Optional, Tuple import numpy as np from ...tokenization_utils_fast import PreTrainedTokenizer from ...utils import logging if TYPE_CHECKING: from transformers.pipelines.conversational import Conversation logger = logging.get_logger(__name__) VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt", "emoji_file": "emoji.json"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "abeja/gpt-neox-japanese-2.7b": "https://huggingface.co/abeja/gpt-neox-japanese-2.7b/resolve/main/vocab.txt", }, "emoji_file": { "abeja/gpt-neox-japanese-2.7b": "https://huggingface.co/abeja/gpt-neox-japanese-2.7b/resolve/main/emoji.json", }, } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "abeja/gpt-neox-japanese-2.7b": 2048, } def load_vocab_and_emoji(vocab_file, emoji_file): """Loads a vocabulary file and emoji file into a dictionary.""" with open(emoji_file, "r", encoding="utf-8") as f: emoji = json.loads(f.read()) vocab = collections.OrderedDict() raw_vocab = collections.OrderedDict() ids_to_tokens = collections.OrderedDict() with open(vocab_file, "r", encoding="utf-8") as f: token = f.readlines() token = [[t.rstrip("\n")] if (t == "," or "," not in t) else t.rstrip("\n").split(",") for t in token] for idx, b in enumerate(token): 
ids_to_tokens[idx] = b raw_vocab[",".join(b)] = idx for wd in b: vocab[wd] = idx return vocab, raw_vocab, ids_to_tokens, emoji class GPTNeoXJapaneseTokenizer(PreTrainedTokenizer): """ This tokenizer inherits from [`PreTrainedTokenizer`] and is based on Japanese special Sub-Word-Encoding that is used in this repository (https://github.com/tanreinama/Japanese-BPEEncoder_V2). Check the repository for details. Japanese has a relatively large vocabulary and there is no separation between words. Furthermore, the language is a combination of hiragana, katakana, and kanji, and variants such as "1" and "①" are often used. In order to cope with these, this tokenizer has the following features - Subword-by-subword segmentation, which is intermediate between byte strings and morphological analysis. - BPEs are created for each Kanji, Hiragana, and Katakana character, and there are no BPEs that cross character types, such as Kanji + Hiragana or Hiragana + Katakana. - All-byte encoding that does not require <unk>. - Independent of UTF codes such as 2-byte and 3-byte characters - Conversion of heterographs to the same token_id - Emoji and Emoticon are grouped into 12 types as special tags. Example: ```python >>> from transformers import GPTNeoXJapaneseTokenizer >>> tokenizer = GPTNeoXJapaneseTokenizer.from_pretrained("abeja/gpt-neox-japanese-2.7b") >>> # You can confirm both 慶応 and 慶應 are encoded to 17749 >>> tokenizer("吾輩は猫である🐯。実は慶応(慶應)大学出身")["input_ids"] [30014, 26883, 26638, 27228, 25, 26650, 31732, 31679, 27809, 26638, 17749, 31592, 17749, 31593, 321, 1281] >>> # Both 慶応 and 慶應 are decoded to 慶応 >>> tokenizer.decode(tokenizer("吾輩は猫である🐯。実は慶応(慶應)大学出身")["input_ids"]) '吾輩は猫である🐯。実は慶応(慶応)大学出身' ``` Args: vocab_file (`str`): File containing the vocabulary. emoji_file (`str`): File containing the emoji. unk_token (`str`, *optional*, defaults to `"<|endoftext|>"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. 
pad_token (`str`, *optional*, defaults to `"<|endoftext|>"`): The token used for padding bos_token (`str`, *optional*, defaults to `"<|startoftext|>"`): The beginning of sequence token. eos_token (`str`, *optional*, defaults to `"<|endoftext|>"`): The end of sequence token. do_clean_text (`bool`, *optional*, defaults to `False`): Whether or not to clean text for URL, EMAIL, TEL, Japanese DATE and Japanese PRICE. """ vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES model_input_names = ["input_ids", "attention_mask"] def __init__( self, vocab_file, emoji_file, unk_token="<|endoftext|>", pad_token="<|endoftext|>", bos_token="<|startoftext|>", eos_token="<|endoftext|>", do_clean_text=False, **kwargs, ): super().__init__( unk_token=unk_token, pad_token=pad_token, bos_token=bos_token, eos_token=eos_token, do_clean_text=do_clean_text, **kwargs, ) if not os.path.isfile(vocab_file): raise ValueError( f"Can't find a vocabulary file at path '{vocab_file}'. To load the vocabulary from a Google pretrained" " model use `tokenizer = GPTNeoXJapaneseokenizer.from_pretrained(PRETRAINED_MODEL_NAME)`" ) if not os.path.isfile(emoji_file): raise ValueError( f"Can't find a emoji file at path '{emoji_file}'. 
To load the emoji information from a Google" " pretrained model use `tokenizer = GPTNeoXJapaneseokenizer.from_pretrained(PRETRAINED_MODEL_NAME)`" ) self.do_clean_text = do_clean_text self.vocab, self.raw_vocab, self.ids_to_tokens, self.emoji = load_vocab_and_emoji(vocab_file, emoji_file) self.subword_tokenizer = SubWordJapaneseTokenizer( vocab=self.vocab, ids_to_tokens=self.ids_to_tokens, emoji=self.emoji ) @property def vocab_size(self): # self.vocab contains support for character fluctuation unique to Japanese, and has a large number of vocab return len(self.raw_vocab) def get_vocab(self): return dict(self.raw_vocab, **self.added_tokens_encoder) def _tokenize(self, text): return self.subword_tokenizer.tokenize(text, clean=self.do_clean_text) def _convert_token_to_id(self, token): """Converts a token (str) in an id using the vocab.""" return self.vocab.get(token, self.vocab.get(self.unk_token)) def _convert_id_to_token(self, index): """Converts an index (integer) in a token (str) using the vocab.""" return self.subword_tokenizer.convert_id_to_token(index) def convert_tokens_to_string(self, tokens): """Converts a sequence of tokens (string) in a single string.""" out_string = "".join(tokens).strip() return out_string def _build_conversation_input_ids(self, conversation: "Conversation") -> List[int]: """This corresponds to DialoGPT variants of models.""" input_ids = [] for is_user, text in conversation.iter_texts(): input_ids.extend(self.encode(text, add_special_tokens=False) + [self.eos_token_id]) if len(input_ids) > self.model_max_length: input_ids = input_ids[-self.model_max_length :] return input_ids def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: index = 0 if os.path.isdir(save_directory): vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) emoji_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") 
+ VOCAB_FILES_NAMES["emoji_file"] ) else: vocab_file = ( (filename_prefix + "-" if filename_prefix else "") + save_directory + VOCAB_FILES_NAMES["vocab_file"] ) emoji_file = ( (filename_prefix + "-" if filename_prefix else "") + save_directory + VOCAB_FILES_NAMES["emoji_file"] ) with open(vocab_file, "w", encoding="utf-8") as writer: for token_index, token in self.ids_to_tokens.items(): if index != token_index: logger.warning( f"Saving vocabulary to {vocab_file}: vocabulary indices are not consecutive." " Please check that the vocabulary is not corrupted!" ) index = token_index writer.write(",".join(token) + "\n") index += 1 with open(emoji_file, "w", encoding="utf-8") as writer: json.dump(self.emoji, writer) return vocab_file, emoji_file class SubWordJapaneseTokenizer(object): """ https://github.com/tanreinama/Japanese-BPEEncoder_V2 This tokenizer class is under MIT Lisence according to the original repository. MIT License Copyright (c) 2020 tanreinama Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" def __init__(self, vocab, ids_to_tokens, emoji): self.vocab = vocab # same as swe self.ids_to_tokens = ids_to_tokens # same as bpe self.emoji = emoji self.maxlen = np.max([len(w) for w in self.vocab.keys()]) self.content_repatter1 = re.compile(r"(https?|ftp)(:\/\/[-_\.!~*\'()a-zA-Z0-9;\/?:\@&=\+$,%#]+)") self.content_repatter2 = re.compile(r"[A-Za-z0-9\._+]*@[\-_0-9A-Za-z]+(\.[A-Za-z]+)*") self.content_repatter3 = re.compile(r"[\(]{0,1}[0-9]{2,4}[\)\-\(]{0,1}[0-9]{2,4}[\)\-]{0,1}[0-9]{3,4}") self.content_repatter4 = re.compile( r"([12]\d{3}[/\-年])*(0?[1-9]|1[0-2])[/\-月]((0?[1-9]|[12][0-9]|3[01])日?)*(\d{1,2}|:|\d{1,2}時|\d{1,2}分|\(日\)|\(月\)|\(火\)|\(水\)|\(木\)|\(金\)|\(土\)|㈰|㈪|㈫|㈬|㈭|㈮|㈯)*" ) self.content_repatter5 = re.compile( r"(明治|大正|昭和|平成|令和|㍾|㍽|㍼|㍻|\u32ff)\d{1,2}年(0?[1-9]|1[0-2])月(0?[1-9]|[12][0-9]|3[01])日(\d{1,2}|:|\d{1,2}時|\d{1,2}分|\(日\)|\(月\)|\(火\)|\(水\)|\(木\)|\(金\)|\(土\)|㈰|㈪|㈫|㈬|㈭|㈮|㈯)*" ) self.content_repatter6 = re.compile( r"((0|[1-9]\d*|[1-9]\d{0,2}(,\d{3})+)*億)*((0|[1-9]\d*|[1-9]\d{0,2}(,\d{3})+)*万)*((0|[1-9]\d*|[1-9]\d{0,2}(,\d{3})+)*千)*(0|[1-9]\d*|[1-9]\d{0,2}(,\d{3})+)*(千円|万円|千万円|円|千ドル|万ドル|千万ドル|ドル|千ユーロ|万ユーロ|千万ユーロ|ユーロ)+(\(税込\)|\(税抜\)|\+tax)*" ) keisen = "─━│┃┄┅┆┇┈┉┊┋┌┍┎┏┐┑┒┓└┕┖┗┘┙┚┛├┝┞┟┠┡┢┣┤┥┦┧┨┩┪┫┬┭┮┯┰┱┲┳┴┵┶┷┸┹┺┻┼┽┾┿╀╁╂╃╄╅╆╇╈╉╊╋╌╍╎╏═║╒╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡╢╣╤╥╦╧╨╩╪╫╬╭╮╯╰╱╲╳╴╵╶╷╸╹╺╻╼╽╾╿" blocks = "▀▁▂▃▄▅▆▇█▉▊▋▌▍▎▏▐░▒▓▔▕▖▗▘▙▚▛▜▝▞▟" self.content_trans1 = str.maketrans({k: "<BLOCK>" for k in keisen + blocks}) def __len__(self): return len(self.ids_to_tokens) def clean_text(self, content): content = self.content_repatter1.sub("<URL>", content) content = self.content_repatter2.sub("<EMAIL>", content) content = self.content_repatter3.sub("<TEL>", content) content = self.content_repatter4.sub("<DATE>", content) content = self.content_repatter5.sub("<DATE>", content) content = self.content_repatter6.sub("<PRICE>", content) content = content.translate(self.content_trans1) while "<BLOCK><BLOCK>" in content: content = content.replace("<BLOCK><BLOCK>", "<BLOCK>") 
return content def tokenize(self, text, clean=False): text = text.replace(" ", "<SP>") text = text.replace(" ", "<SP>") text = text.replace("\r\n", "<BR>") text = text.replace("\n", "<BR>") text = text.replace("\r", "<BR>") text = text.replace("\t", "<TAB>") text = text.replace("—", "ー") text = text.replace("−", "ー") for k, v in self.emoji["emoji"].items(): if k in text: text = text.replace(k, v) if clean: text = self.clean_text(text) def check_simbol(x): e = x.encode() if len(x) == 1 and len(e) == 2: c = (int(e[0]) << 8) + int(e[1]) if ( (c >= 0xC2A1 and c <= 0xC2BF) or (c >= 0xC780 and c <= 0xC783) or (c >= 0xCAB9 and c <= 0xCBBF) or (c >= 0xCC80 and c <= 0xCDA2) ): return True return False def checku2e(x): e = x.encode() if len(x) == 1 and len(e) == 3: c = (int(e[0]) << 16) + (int(e[1]) << 8) + int(e[2]) if c >= 0xE28080 and c <= 0xE2B07F: return True return False pos = 0 result = [] while pos < len(text): end = min(len(text), pos + self.maxlen + 1) if text[pos] == "<" else pos + 3 candidates = [] # (token_id, token, pos) for e in range(end, pos, -1): wd = text[pos:e] if wd in self.vocab: if wd[0] == "<" and len(wd) > 2: candidates = [(self.vocab[wd], wd, e)] break else: candidates.append((self.vocab[wd], wd, e)) if len(candidates) > 0: # the smallest token_id is adopted _, wd, e = sorted(candidates, key=lambda x: x[0])[0] result.append(wd) pos = e else: end = pos + 1 wd = text[pos:end] if check_simbol(wd): result.append("<KIGOU>") elif checku2e(wd): result.append("<U2000U2BFF>") else: for i in wd.encode("utf-8"): result.append("<|byte%d|>" % i) pos = end return result def convert_id_to_token(self, index, breakline="\n"): words = [] byte_tokens = [] word = self.ids_to_tokens[index][0] if word[:6] == "<|byte" and word[-2:] == "|>": byte_tokens.append(int(word[6:-2])) else: if len(byte_tokens) > 0: words.append(bytearray(byte_tokens).decode("utf-8", errors="replace")) byte_tokens = [] if word[:7] == "<|emoji" and word[-2:] == "|>": 
words.append(self.emoji["emoji_inv"][word]) elif word == "<SP>": words.append(" ") elif word == "<BR>": words.append(breakline) elif word == "<TAB>": words.append("\t") elif word == "<BLOCK>": words.append("▀") elif word == "<KIGOU>": words.append("ǀ") elif word == "<U2000U2BFF>": words.append("‖") else: words.append(word) if len(byte_tokens) > 0: words.append(bytearray(byte_tokens).decode("utf-8", errors="replace")) text = "".join(words) return text
2833844911/cy_jsvmp
2,009
src/t3.js
// 线程版例子 var a = 0 var ad = 29 var f = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1] // “cbb_”开头的是特殊函数(特殊函数才可以异步) function cbb_hu(e,b){ for (var i =0; i < e.length; i++){ for (var u=0;u < b % 10; u++){ a = u; } if (ad + 1 === 1){ ad = 29 } cbb_prgnx() console.log(ad, e[i] + (b % ad) *3, f[ad], ad) f[ad] =f[ad] + e[i] + (b % ad) *3 cbb_prgunx() ad -= 1 } return 90 } function cbb_hu2(e,b){ for (var i =0; i < e.length; i++){ for (var u=0;u < b % 20; u++){ a = u; } if (ad + 1 === 1){ ad = 29 } cbb_prgnx() console.log(ad, e[i] + b % ad, f[ad], ad) f[ad] = f[ad] + e[i] + b % ad cbb_prgunx() ad -= 1 } return 90 } function cbb_op(e,b){ for (var i =0; i < e.length; i++){ for (var u=0;u < b % 30; u++){ a = u; } if (ad + 1 === 1){ ad = 29 } // 线程锁开启 cbb_prgnx() console.log(29-ad, e[i] + ad + b, f[29 - ad], ad) f[29 - ad] = f[29 - ad] + e[i] + ad + b // 线程锁关闭 cbb_prgunx() ad -= 1 } return 0 } function sign(){ var mm_ = "2833844911" var mm2_ = "udiaudisaoduas" var code_ = "565767" var mm = mm_.split('') for (var i =0; i< mm.length; i++){ mm[i] = mm[i].charCodeAt(0) } var mm2 = mm2_.split('') for (i =0; i< mm2.length; i++){ mm2[i] = mm2[i].charCodeAt(0) } var code = code_.split('') for (i =0; i< code.length; i++){ code[i] = code[i].charCodeAt(0) } debugger var s = cbb_op(mm, 30); console.log("dadasd",s) // 开始线程 cbb_prg(cbb_op(mm, 30), cbb_hu(mm2, 6786), cbb_hu2(code, 76)) var dt = '' for (i = 0 ; i<30; i++){ dt += f[i] } console.log(f) var s = {"zhaohao": mm_,"mima":mm2_,"code":code_,"sign":dt} return s } if (window){ window.sign = sign; }else { global.sign = sign; } var tw = sign() debugger console.log(tw) console.log(JSON.stringify(tw))
2833844911/cy_jsvmp
1,931
src/test.js
var a = 0 var ad = 29 var f = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1] function cbb_hu(e,b){ for (var i =0; i < e.length; i++){ for (var u=0;u < b % 10; u++){ a = u; } if (ad + 1 === 1){ ad = 29 } cbb_prgnx() console.log(ad, e[i] + (b % ad) *3, f[ad], ad) f[ad] =f[ad] + e[i] + (b % ad) *3 cbb_prgunx() ad -= 1 } return 90 } function cbb_hu2(e,b){ for (var i =0; i < e.length; i++){ for (var u=0;u < b % 20; u++){ a = u; } if (ad + 1 === 1){ ad = 29 } cbb_prgnx() console.log(ad, e[i] + b % ad, f[ad], ad) f[ad] = f[ad] + e[i] + b % ad cbb_prgunx() ad -= 1 } return 90 } function cbb_op(e,b){ for (var i =0; i < e.length; i++){ for (var u=0;u < b % 30; u++){ a = u; } if (ad + 1 === 1){ ad = 29 } cbb_prgnx() console.log(29-ad, e[i] + ad + b, f[29 - ad], ad) f[29 - ad] = f[29 - ad] + e[i] + ad + b cbb_prgunx() ad -= 1 } return 0 } function sign(){ var mm_ = "2833844911" var mm2_ = "udiaudisaoduas" var code_ = "565767" var mm = mm_.split('') for (var i =0; i< mm.length; i++){ mm[i] = mm[i].charCodeAt(0) } var mm2 = mm2_.split('') for (i =0; i< mm2.length; i++){ mm2[i] = mm2[i].charCodeAt(0) } var code = code_.split('') for (i =0; i< code.length; i++){ code[i] = code[i].charCodeAt(0) } debugger var s = cbb_op(mm, 30); console.log("dadasd",s) cbb_prg(cbb_op(mm, 30), cbb_hu(mm2, 6786), cbb_hu2(code, 76)) var dt = '' for (i = 0 ; i<30; i++){ dt += f[i] } console.log(f) var s = {"zhaohao": mm_,"mima":mm2_,"code":code_,"sign":dt} return s } if (window){ window.sign = sign; }else { global.sign = sign; } var tw = sign() debugger console.log(tw) console.log(JSON.stringify(tw))
2833844911/cy_jsvmp
3,060
outsrc/out2.js
// 线程版例子 var c_f_0 = 0; var c_f_1 = 29; var c_f_2 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]; // “cbb_”开头的是特殊函数(特殊函数才可以异步) function cbb_hu(c_f_3, c_f_4) { for (var c_f_5 = 0; c_f_5 < c_f_3.length; c_f_6 = c_f_5, ++c_f_5, c_f_6) { var c_f_6; for (var c_f_7 = 0; c_f_7 < c_f_4 % 10; c_f_8 = c_f_7, ++c_f_7, c_f_8) { var c_f_8; c_f_0 = c_f_7; } if (c_f_1 + 1 === 1) { c_f_1 = 29; } cbb_prgnx(); console.log(c_f_1, c_f_3[c_f_5] + c_f_4 % c_f_1 * 3, c_f_2[c_f_1], c_f_1); c_f_2[c_f_1] = c_f_2[c_f_1] + c_f_3[c_f_5] + c_f_4 % c_f_1 * 3; cbb_prgunx(); c_f_1 -= 1; } return 90; } function cbb_hu2(c_f_9, c_f_10) { for (var c_f_11 = 0; c_f_11 < c_f_9.length; c_f_12 = c_f_11, ++c_f_11, c_f_12) { var c_f_12; for (var c_f_13 = 0; c_f_13 < c_f_10 % 20; c_f_14 = c_f_13, ++c_f_13, c_f_14) { var c_f_14; c_f_0 = c_f_13; } if (c_f_1 + 1 === 1) { c_f_1 = 29; } cbb_prgnx(); console.log(c_f_1, c_f_9[c_f_11] + c_f_10 % c_f_1, c_f_2[c_f_1], c_f_1); c_f_2[c_f_1] = c_f_2[c_f_1] + c_f_9[c_f_11] + c_f_10 % c_f_1; cbb_prgunx(); c_f_1 -= 1; } return 90; } function cbb_op(c_f_15, c_f_16) { for (var c_f_17 = 0; c_f_17 < c_f_15.length; c_f_18 = c_f_17, ++c_f_17, c_f_18) { var c_f_18; for (var c_f_19 = 0; c_f_19 < c_f_16 % 30; c_f_20 = c_f_19, ++c_f_19, c_f_20) { var c_f_20; c_f_0 = c_f_19; } if (c_f_1 + 1 === 1) { c_f_1 = 29; } // 线程锁开启 cbb_prgnx(); console.log(29 - c_f_1, c_f_15[c_f_17] + c_f_1 + c_f_16, c_f_2[29 - c_f_1], c_f_1); c_f_2[29 - c_f_1] = c_f_2[29 - c_f_1] + c_f_15[c_f_17] + c_f_1 + c_f_16; // 线程锁关闭 cbb_prgunx(); c_f_1 -= 1; } return 0; } function c_f_21() { var c_f_22 = "2833844911"; var c_f_23 = "udiaudisaoduas"; var c_f_24 = "565767"; var c_f_25 = c_f_22.split(''); for (var c_f_26 = 0; c_f_26 < c_f_25.length; c_f_27 = c_f_26, ++c_f_26, c_f_27) { var c_f_27; c_f_25[c_f_26] = c_f_25[c_f_26].charCodeAt(0); } var c_f_28 = c_f_23.split(''); for (c_f_26 = 0; c_f_26 < c_f_28.length; c_f_29 = c_f_26, ++c_f_26, c_f_29) { var c_f_29; c_f_28[c_f_26] 
= c_f_28[c_f_26].charCodeAt(0); } var c_f_30 = c_f_24.split(''); for (c_f_26 = 0; c_f_26 < c_f_30.length; c_f_31 = c_f_26, ++c_f_26, c_f_31) { var c_f_31; c_f_30[c_f_26] = c_f_30[c_f_26].charCodeAt(0); } debugger; var c_f_35 = cbb_op(c_f_25, 30); console.log("dadasd", c_f_35); // 开始线程 cbb_prg(cbb_op(c_f_25, 30), cbb_hu(c_f_28, 6786), cbb_hu2(c_f_30, 76)); var c_f_33 = ''; for (c_f_26 = 0; c_f_26 < 30; c_f_34 = c_f_26, ++c_f_26, c_f_34) { var c_f_34; c_f_33 += c_f_2[c_f_26]; } console.log(c_f_2); var c_f_35 = { "zhaohao": c_f_22, "mima": c_f_23, "code": c_f_24, "sign": c_f_33 }; return c_f_35; } if (window) { window.sign = c_f_21; } else { global.sign = c_f_21; } var c_f_36 = c_f_21(); debugger; console.log(c_f_36); console.log(JSON.stringify(c_f_36));
27182812/ChatGLM-LLaMA-chinese-insturct
32,445
src/transformers/models/gpt_neox_japanese/modeling_gpt_neox_japanese.py
# coding=utf-8 # Copyright 2022 ABEJA, Inc. and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch GPTNeoX model.""" from typing import Optional, Tuple, Union import torch import torch.utils.checkpoint from torch import Tensor, nn from torch.nn import CrossEntropyLoss from ...activations import ACT2FN from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast from ...modeling_utils import PreTrainedModel from ...utils import logging from .configuration_gpt_neox_japanese import GPTNeoXJapaneseConfig logger = logging.get_logger(__name__) _CHECKPOINT_FOR_DOC = "abeja/gpt-neox-japanese-2.7b" _CONFIG_FOR_DOC = "GPTNeoXJapaneseConfig" GPT_NEOX_JAPANESE_PRETRAINED_MODEL_ARCHIVE_LIST = { "https://huggingface.co/abeja/gpt-neox-japanese-2.7b/resolve/main/config.json", # See all GPTNeoXJapanese models at https://huggingface.co/models?filter=gpt_neox_japanese } class GPTNeoXJapanesePreTrainedModel(PreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. 
""" config_class = GPTNeoXJapaneseConfig base_model_prefix = "gpt_neox_japanese" supports_gradient_checkpointing = True _no_split_modules = ["GPTNeoXJapaneseLayer"] def _init_weights(self, module): """Initialize the weights""" if isinstance(module, nn.Linear): module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.bias is not None: module.bias.data.zero_() elif isinstance(module, nn.Embedding): module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.padding_idx is not None: module.weight.data[module.padding_idx].zero_() elif isinstance(module, nn.LayerNorm): module.bias.data.zero_() module.weight.data.fill_(1.0) def _set_gradient_checkpointing(self, module, value=False): if isinstance(module, GPTNeoXJapaneseModel): module.gradient_checkpointing = value class GPTNeoXJapaneseAttention(nn.Module): def __init__(self, config, use_bias=False): super().__init__() self.num_attention_heads = config.num_attention_heads self.hidden_size = config.hidden_size self.head_size = self.hidden_size // self.num_attention_heads self.rotary_ndims = int(self.head_size * config.rotary_pct) self.rotary_emb = RotaryEmbedding( self.rotary_ndims, config.max_position_embeddings, base=config.rotary_emb_base ) self.max_positions = config.max_position_embeddings self.attention_dropout = nn.Dropout(config.attention_dropout) self.norm_factor = torch.sqrt(torch.tensor(self.head_size, dtype=torch.float32)).to(torch.get_default_dtype()) self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=False) self.dense = nn.Linear(config.hidden_size, config.hidden_size, bias=False) # Activate bias if the last layer self.use_bias = use_bias self.dense_bias = nn.Parameter(torch.zeros(config.hidden_size)) if use_bias else None def forward( self, hidden_states, attention_mask, head_mask=None, layer_past=None, use_cache=False, output_attentions=False, ): has_layer_past = layer_past is not None and layer_past[0].numel() > 0 # Compute QKV # 
Attention heads [batch, seq_len, hidden_size] # --> [batch, seq_len, (np * 3 * head_size)] qkv = self.query_key_value(hidden_states) # [batch, seq_len, (num_heads * 3 * head_size)] # --> [batch, seq_len, num_heads, 3 * head_size] new_qkv_shape = qkv.size()[:-1] + (self.num_attention_heads, 3 * self.head_size) qkv = qkv.view(*new_qkv_shape) # [batch, seq_len, num_attention_heads, 3 * head_size] --> 3 [batch, num_attention_heads, seq_len, head_size] query = qkv[..., : self.head_size].permute(0, 2, 1, 3) key = qkv[..., self.head_size : 2 * self.head_size].permute(0, 2, 1, 3) value = qkv[..., 2 * self.head_size :].permute(0, 2, 1, 3) # Compute rotary embeddings on rotary_ndims query_rot = query[..., : self.rotary_ndims] query_pass = query[..., self.rotary_ndims :] key_rot = key[..., : self.rotary_ndims] key_pass = key[..., self.rotary_ndims :] # Compute token offset for rotary embeddings (when decoding) seq_len = key.shape[-2] offset = 0 if has_layer_past: offset = layer_past[0].shape[-2] seq_len += offset cos, sin = self.rotary_emb(value, seq_len=seq_len) query, key = apply_rotary_pos_emb(query_rot, key_rot, cos, sin, offset=offset) query = torch.cat((query, query_pass), dim=-1) key = torch.cat((key, key_pass), dim=-1) # Cache QKV values if has_layer_past: past_key = layer_past[0] past_value = layer_past[1] key = torch.cat((past_key, key), dim=-2) value = torch.cat((past_value, value), dim=-2) present = (key, value) if use_cache else None # Compute attention attn_output, attn_weights = self._attn(query, key, value, attention_mask, head_mask) # Reshape outputs attn_output = self._merge_heads(attn_output, self.num_attention_heads, self.head_size) attn_output = self.dense(attn_output) outputs = (attn_output, present) if output_attentions: outputs += (attn_weights,) return outputs, self.dense_bias @classmethod def _split_heads(cls, tensor, num_attention_heads, attn_head_size): """ Splits hidden dim into attn_head_size and num_attention_heads """ # tensor: [bs, seq_len, 
hidden_size] new_shape = tensor.size()[:-1] + (num_attention_heads, attn_head_size) # -> [bs, seq_len, num_attention_heads, attn_head_size] tensor = tensor.view(new_shape) # -> [bs, num_attention_heads, seq_len, attn_head_size] tensor = tensor.permute(0, 2, 1, 3) return tensor @classmethod def _merge_heads(cls, tensor, num_attention_heads, attn_head_size): """ Merges attn_head_size dim and num_attn_heads dim into hidden dim """ # tensor [bs, num_attention_heads, seq_len, attn_head_size] tensor = tensor.permute(0, 2, 1, 3).contiguous() # -> [bs, seq_len, num_attention_heads, attn_head_size] tensor = tensor.view(tensor.size(0), tensor.size(1), num_attention_heads * attn_head_size) # -> [bs, seq_len, hidden_size] return tensor def _create_causal_mask(self, key_length, query_length): causal_mask = torch.tril( torch.ones((self.max_positions, self.max_positions), dtype=torch.bool).view( 1, 1, self.max_positions, self.max_positions ) ) return causal_mask[:, :, key_length - query_length : key_length, :key_length] def _attn(self, query, key, value, attention_mask=None, head_mask=None): # q, k, v: [bs, num_attention_heads, seq_len, attn_head_size] # compute causal mask from causal mask buffer batch_size, num_attention_heads, query_length, attn_head_size = query.size() key_length = key.size(-2) causal_mask = self._create_causal_mask(key_length, query_length) query = query.view(batch_size * num_attention_heads, query_length, attn_head_size) key = key.view(batch_size * num_attention_heads, key_length, attn_head_size) attn_scores = torch.zeros( batch_size * num_attention_heads, query_length, key_length, dtype=query.dtype, device=key.device, ) attn_scores = torch.baddbmm( attn_scores, query, key.transpose(1, 2), beta=1.0, alpha=(torch.tensor(1.0, dtype=self.norm_factor.dtype, device=self.norm_factor.device) / self.norm_factor), ) attn_scores = attn_scores.view(batch_size, num_attention_heads, query_length, key_length) mask_value = torch.finfo(attn_scores.dtype).min # Need to be a 
tensor, otherwise we get error: `RuntimeError: expected scalar type float but found double`. # Need to be on the same device, otherwise `RuntimeError: ..., x and y to be on the same device` mask_value = torch.tensor(mask_value, dtype=attn_scores.dtype).to(attn_scores.device) causal_mask = causal_mask.to(attn_scores.device) attn_scores = torch.where(causal_mask, attn_scores, mask_value) if attention_mask is not None: # Apply the attention mask attn_scores = attn_scores + attention_mask attn_weights = nn.functional.softmax(attn_scores, dim=-1) attn_weights = self.attention_dropout(attn_weights) attn_weights = attn_weights.to(value.dtype) # Mask heads if we want to if head_mask is not None: attn_weights = attn_weights * head_mask attn_output = torch.matmul(attn_weights, value) return attn_output, attn_weights # Copied from transformers.models.gpt_neox.modeling_gpt_neox.RotaryEmbedding class RotaryEmbedding(torch.nn.Module): def __init__(self, dim, max_position_embeddings, base=10000, device=None): super().__init__() inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2).float().to(device) / dim)) self.register_buffer("inv_freq", inv_freq) # Build here to make `torch.jit.trace` work. self.max_seq_len_cached = max_position_embeddings t = torch.arange(self.max_seq_len_cached, device=self.inv_freq.device, dtype=self.inv_freq.dtype) freqs = torch.einsum("i,j->ij", t, self.inv_freq) # Different from paper, but it uses a different permutation in order to obtain the same calculation emb = torch.cat((freqs, freqs), dim=-1) self.cos_cached = emb.cos()[None, None, :, :] self.sin_cached = emb.sin()[None, None, :, :] def forward(self, x, seq_len=None): # x: [bs, num_attention_heads, seq_len, head_size] # This `if` block is unlikely to be run after we build sin/cos in `__init__`. Keep the logic here just in case. 
if seq_len > self.max_seq_len_cached: self.max_seq_len_cached = seq_len t = torch.arange(self.max_seq_len_cached, device=x.device, dtype=self.inv_freq.dtype) freqs = torch.einsum("i,j->ij", t, self.inv_freq) # Different from paper, but it uses a different permutation in order to obtain the same calculation emb = torch.cat((freqs, freqs), dim=-1).to(x.device) self.cos_cached = emb.cos()[None, None, :, :] self.sin_cached = emb.sin()[None, None, :, :] return self.cos_cached[:seq_len, ...].to(x.device), self.sin_cached[:seq_len, ...].to(x.device) def rotate_half(x): """Rotates half the hidden dims of the input.""" x1 = x[..., : x.shape[-1] // 2] x2 = x[..., x.shape[-1] // 2 :] return torch.cat((-x2, x1), dim=-1) def apply_rotary_pos_emb(q, k, cos, sin, offset: int = 0): cos = cos[..., offset : q.shape[-2] + offset, :] sin = sin[..., offset : q.shape[-2] + offset, :] q_embed = (q * cos) + (rotate_half(q) * sin) k_embed = (k * cos) + (rotate_half(k) * sin) return q_embed, k_embed def bias_dropout_add(x: Tensor, bias: Tensor, residual: Optional[Tensor], prob: float, training: bool) -> Tensor: """add bias to x, apply dropout and residual connection Args: x (Tensor): main path of output bias (Tensor): None or attn_bias of the last attention layer residual (Optional[Tensor]): residual value prob (float): dropout probability training (bool): whether in training mode or not Returns: Tensor: dropout(x + bias) + residual """ if bias is not None: x = x + bias out = torch.nn.functional.dropout(x, p=prob, training=training) if residual is not None: out = residual + out return out class GPTNeoXJapaneseMLP(nn.Module): def __init__(self, config): super().__init__() intermediate_size = int(config.hidden_size * config.intermediate_multiple_size) self.dense_h_to_4h = nn.Linear(config.hidden_size, intermediate_size, bias=False) # Project back to h. 
self.dense_4h_to_h = nn.Linear(intermediate_size, config.hidden_size, bias=False) self.act = ACT2FN[config.hidden_act] def forward(self, hidden_states): intermediate = self.dense_h_to_4h(hidden_states) intermediate = self.act(intermediate) output = self.dense_4h_to_h(intermediate) return output class GPTNeoXJapaneseLayer(nn.Module): def __init__(self, config, layer_number): super().__init__() self.layer_number = layer_number self.input_layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) self.post_attention_layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) # activate bias only last layer self.attention = GPTNeoXJapaneseAttention(config=config, use_bias=layer_number == config.num_hidden_layers - 1) self.mlp = GPTNeoXJapaneseMLP(config) self.hidden_dropout = config.hidden_dropout def forward( self, hidden_states, attention_mask=None, head_mask=None, use_cache=False, layer_past=None, output_attentions=False, ): residual = hidden_states ln_out = self.input_layernorm(hidden_states) attention_layer_outputs, attn_bias = self.attention( ln_out, attention_mask=attention_mask, layer_past=layer_past, head_mask=head_mask, use_cache=use_cache, output_attentions=output_attentions, ) attn_output = attention_layer_outputs[0] # output_attn: a, present, (attentions) outputs = attention_layer_outputs[1:] # attn_output = (atten_output + bias) + residual attn_output = bias_dropout_add( attn_output, bias=attn_bias.expand_as(residual) if attn_bias is not None else attn_bias, residual=residual, prob=self.hidden_dropout, training=self.training, ) mlp_output = self.mlp(self.post_attention_layernorm(attn_output)) # attn_output = (mlp_output + mlp_bias) + atten_output attn_output = bias_dropout_add( mlp_output, bias=None, residual=attn_output, prob=self.hidden_dropout, training=self.training ) if use_cache: outputs = (attn_output,) + outputs else: outputs = (attn_output,) + outputs[1:] return outputs # hidden_states, present, (attentions) 
GPT_NEOX_JAPANESE_START_DOCSTRING = r""" This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) sub-class. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`~GPTNeoXJapaneseConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ GPT_NEOX_JAPANESE_INPUTS_DOCSTRING = r""" Args: input_ids (`torch.LongTensor` of shape `({0})`): Indices of input sequence tokens in the vocabulary. Indices can be obtained using [`AutoTokenizer`]. attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. token_type_ids (`torch.LongTensor` of shape `({0})`, *optional*): Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0, 1]`: - 0 corresponds to a *sentence A* token, - 1 corresponds to a *sentence B* token. position_ids (`torch.LongTensor` of shape `({0})`, *optional*): Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`. head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. 
This is useful if you want more control over how to convert *input_ids* indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple. """ @add_start_docstrings( "The bare GPTNeoXJapanese Model transformer outputting raw hidden-states without any specific head on top.", GPT_NEOX_JAPANESE_START_DOCSTRING, ) class GPTNeoXJapaneseModel(GPTNeoXJapanesePreTrainedModel): def __init__(self, config): super().__init__(config) self.config = config self.embed_in = nn.Embedding(config.vocab_size, config.hidden_size) self.layers = nn.ModuleList( [GPTNeoXJapaneseLayer(config=config, layer_number=i) for i in range(config.num_hidden_layers)] ) self.final_layer_norm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.embed_in def set_input_embeddings(self, value): self.embed_in = value @add_start_docstrings_to_model_forward(GPT_NEOX_JAPANESE_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @replace_return_docstrings(output_type=BaseModelOutputWithPast, config_class=_CONFIG_FOR_DOC) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: 
Optional[bool] = None, ) -> Union[Tuple, BaseModelOutputWithPast]: r""" past_key_values (`tuple(tuple(torch.FloatTensor))` of length `config.n_layers` with each tuple having 4 tensors of shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. use_cache (`bool`, *optional*): If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see `past_key_values`). Returns: Example: ```python >>> from transformers import AutoTokenizer, GPTNeoXJapaneseModel >>> import torch >>> tokenizer = AutoTokenizer.from_pretrained("abeja/gpt-neox-japanese-2.7b") >>> model = GPTNeoXJapaneseModel.from_pretrained("abeja/gpt-neox-japanese-2.7b") >>> inputs = tokenizer("日本語のGPT-neoxがHugging Faceで使えます😀", return_tensors="pt") >>> outputs = model(**inputs) >>> last_hidden_states = outputs.last_hidden_state ``` """ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict use_cache = use_cache if use_cache is not None else self.config.use_cache if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") elif input_ids is not None: input_shape = input_ids.size() elif inputs_embeds is not None: input_shape = inputs_embeds.size()[:-1] else: raise ValueError("You have to specify either input_ids or inputs_embeds") batch_size, 
seq_length = input_shape if past_key_values is None: past_key_values = tuple([None] * self.config.num_hidden_layers) # Attention mask. if attention_mask is not None: if not batch_size > 0: raise ValueError("batch_size has to be defined and > 0") attention_mask = attention_mask.view(batch_size, -1) # We create a 3D attention mask from a 2D tensor mask. # Sizes are [batch_size, 1, 1, to_seq_length] # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length] # this attention mask is more simple than the triangular masking of causal attention # used in OpenAI GPT, we just need to prepare the broadcast dimension here. attention_mask = attention_mask[:, None, None, :] # Since attention_mask is 1.0 for positions we want to attend and 0.0 for # masked positions, this operation will create a tensor which is 0.0 for # positions we want to attend and -10000.0 for masked positions. # Since we are adding it to the raw scores before the softmax, this is # effectively the same as removing these entirely. 
attention_mask = attention_mask.to(dtype=self.dtype) # fp16 compatibility attention_mask = (1.0 - attention_mask) * torch.finfo(self.dtype).min # Prepare head mask if needed # 1.0 in head_mask indicate we keep the head # attention_probs has shape bsz x n_heads x N x N # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) if inputs_embeds is None: inputs_embeds = self.embed_in(input_ids) hidden_states = inputs_embeds presents = () if use_cache else None all_attentions = () if output_attentions else None all_hidden_states = () if output_hidden_states else None for i, (layer, layer_past) in enumerate(zip(self.layers, past_key_values)): if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) outputs = layer( hidden_states, attention_mask=attention_mask, head_mask=head_mask[i], layer_past=layer_past, use_cache=use_cache, output_attentions=output_attentions, ) hidden_states = outputs[0] if use_cache is True: presents = presents + (outputs[1],) if output_attentions: all_attentions = all_attentions + (outputs[2 if use_cache else 1],) hidden_states = self.final_layer_norm(hidden_states) # Add last hidden state if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not return_dict: return tuple(v for v in [hidden_states, presents, all_hidden_states, all_attentions] if v is not None) return BaseModelOutputWithPast( last_hidden_state=hidden_states, past_key_values=presents, hidden_states=all_hidden_states, attentions=all_attentions, ) @add_start_docstrings( """GPTNeoXJapanese Model with a `language modeling` head on top for Classifier Model fine-tuning.""", GPT_NEOX_JAPANESE_START_DOCSTRING, ) class GPTNeoXJapaneseForCausalLM(GPTNeoXJapanesePreTrainedModel): _keys_to_ignore_on_load_missing = [r"position_ids", 
r"predictions.decoder.bias", "embed_out.weight"] def __init__(self, config): super().__init__(config) self.config = config self.gpt_neox_japanese = GPTNeoXJapaneseModel(config) self.embed_out = nn.Linear(config.hidden_size, config.vocab_size, bias=False) # Initialize weights and apply final processing self.post_init() def get_output_embeddings(self): return self.embed_out def set_output_embeddings(self, new_embeddings): self.embed_out = new_embeddings @add_start_docstrings_to_model_forward(GPT_NEOX_JAPANESE_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @replace_return_docstrings(output_type=CausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, head_mask: Optional[torch.FloatTensor] = None, past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, labels: Optional[torch.LongTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, CausalLMOutputWithPast]: r""" past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. The two additional tensors are only required when the model is used as a decoder in a Sequence to Sequence model. Contains pre-computed hidden-states (key and values in the self-attention blocks that can be used (see `past_key_values` input) to speed up sequential decoding. 
If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels n `[0, ..., config.vocab_size]`. use_cache (`bool`, *optional*): If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see `past_key_values`). Returns: Example: ```python >>> from transformers import AutoTokenizer, GPTNeoXJapaneseForCausalLM, GPTNeoXJapaneseConfig >>> import torch >>> tokenizer = AutoTokenizer.from_pretrained("abeja/gpt-neox-japanese-2.7b") >>> config = GPTNeoXJapaneseConfig.from_pretrained("abeja/gpt-neox-japanese-2.7b") >>> config.is_decoder = True >>> model = GPTNeoXJapaneseForCausalLM.from_pretrained("abeja/gpt-neox-japanese-2.7b", config=config) >>> inputs = tokenizer("日本語のGPT-neoxがHugging Faceで使えます😀", return_tensors="pt") >>> outputs = model(**inputs) >>> prediction_logits = outputs.logits ``` """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.gpt_neox_japanese( input_ids, attention_mask=attention_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, past_key_values=past_key_values, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) hidden_states = outputs[0] lm_logits = self.embed_out(hidden_states) lm_loss = None if labels is not None: # we are doing next-token prediction; shift prediction scores and input ids by one shift_logits = 
lm_logits[:, :-1, :].contiguous() labels = labels[:, 1:].contiguous() loss_fct = CrossEntropyLoss() lm_loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), labels.view(-1)) if not return_dict: output = (lm_logits,) + outputs[1:] return ((lm_loss,) + output) if lm_loss is not None else output return CausalLMOutputWithPast( loss=lm_loss, logits=lm_logits, past_key_values=outputs.past_key_values, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def prepare_inputs_for_generation(self, input_ids, past_key_values=None, attention_mask=None, **model_kwargs): input_shape = input_ids.shape # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly if attention_mask is None: attention_mask = input_ids.new_ones(input_shape) # cut decoder_input_ids if past is used if past_key_values and past_key_values[0] is not None: input_ids = input_ids[:, -1:] return {"input_ids": input_ids, "attention_mask": attention_mask, "past_key_values": past_key_values} def _reorder_cache(self, past_key_values, beam_idx): reordered_past = () for layer_past in past_key_values: reordered_past += ( tuple(past_state.index_select(0, beam_idx) for past_state in layer_past[:2]) + layer_past[2:], ) return reordered_past
27182812/ChatGLM-LLaMA-chinese-insturct
1,362
src/transformers/models/bartpho/__init__.py
# Copyright 2021 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_sentencepiece_available _import_structure = {} try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["tokenization_bartpho"] = ["BartphoTokenizer"] if TYPE_CHECKING: try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_bartpho import BartphoTokenizer else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
14,217
src/transformers/models/bartpho/tokenization_bartpho.py
# coding=utf-8 # Copyright 2021 VinAI Research and the HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License """ Tokenization classes for BARTpho-syllable model.""" import os from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging logger = logging.get_logger(__name__) SPIECE_UNDERLINE = "▁" VOCAB_FILES_NAMES = {"vocab_file": "sentencepiece.bpe.model", "monolingual_vocab_file": "dict.txt"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "vinai/bartpho-syllable": "https://huggingface.co/vinai/bartpho-syllable/resolve/main/sentencepiece.bpe.model", }, "monolingual_vocab_file": { "vinai/bartpho-syllable": "https://huggingface.co/vinai/bartpho-syllable/resolve/main/dict.txt", }, } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {"vinai/bartpho-syllable": 1024} class BartphoTokenizer(PreTrainedTokenizer): """ Adapted from [`XLMRobertaTokenizer`]. Based on [SentencePiece](https://github.com/google/sentencepiece). This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: vocab_file (`str`): Path to the vocabulary file. This vocabulary is the pre-trained SentencePiece model available from the multilingual XLM-RoBERTa, also used in mBART, consisting of 250K types. 
monolingual_vocab_file (`str`): Path to the monolingual vocabulary file. This monolingual vocabulary consists of Vietnamese-specialized types extracted from the multilingual vocabulary vocab_file of 250K types. bos_token (`str`, *optional*, defaults to `"<s>"`): The beginning of sequence token that was used during pretraining. Can be used a sequence classifier token. <Tip> When building a sequence using special tokens, this is not the token that is used for the beginning of sequence. The token used is the `cls_token`. </Tip> eos_token (`str`, *optional*, defaults to `"</s>"`): The end of sequence token. <Tip> When building a sequence using special tokens, this is not the token that is used for the end of sequence. The token used is the `sep_token`. </Tip> sep_token (`str`, *optional*, defaults to `"</s>"`): The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for sequence classification or for a text and a question for question answering. It is also used as the last token of a sequence built with special tokens. cls_token (`str`, *optional*, defaults to `"<s>"`): The classifier token which is used when doing sequence classification (classification of the whole sequence instead of per-token classification). It is the first token of the sequence when built with special tokens. unk_token (`str`, *optional*, defaults to `"<unk>"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. pad_token (`str`, *optional*, defaults to `"<pad>"`): The token used for padding, for example when batching sequences of different lengths. mask_token (`str`, *optional*, defaults to `"<mask>"`): The token used for masking values. This is the token used when training this model with masked language modeling. This is the token which the model will try to predict. 
additional_special_tokens (`List[str]`, *optional*, defaults to `["<s>NOTUSED", "</s>NOTUSED"]`): Additional special tokens used by the tokenizer. sp_model_kwargs (`dict`, *optional*): Will be passed to the `SentencePieceProcessor.__init__()` method. The [Python wrapper for SentencePiece](https://github.com/google/sentencepiece/tree/master/python) can be used, among other things, to set: - `enable_sampling`: Enable subword regularization. - `nbest_size`: Sampling parameters for unigram. Invalid for BPE-Dropout. - `nbest_size = {0,1}`: No sampling is performed. - `nbest_size > 1`: samples from the nbest_size results. - `nbest_size < 0`: assuming that nbest_size is infinite and samples from the all hypothesis (lattice) using forward-filtering-and-backward-sampling algorithm. - `alpha`: Smoothing parameter for unigram sampling, and dropout probability of merge operations for BPE-dropout. Attributes: sp_model (`SentencePieceProcessor`): The *SentencePiece* processor that is used for every conversion (string, tokens and IDs). """ vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES model_input_names = ["input_ids", "attention_mask"] def __init__( self, vocab_file, monolingual_vocab_file, bos_token="<s>", eos_token="</s>", sep_token="</s>", cls_token="<s>", unk_token="<unk>", pad_token="<pad>", mask_token="<mask>", sp_model_kwargs: Optional[Dict[str, Any]] = None, **kwargs, ) -> None: # Mask token behave like a normal word, i.e. 
include the space before it mask_token = AddedToken(mask_token, lstrip=True, rstrip=False) if isinstance(mask_token, str) else mask_token self.sp_model_kwargs = {} if sp_model_kwargs is None else sp_model_kwargs super().__init__( bos_token=bos_token, eos_token=eos_token, unk_token=unk_token, sep_token=sep_token, cls_token=cls_token, pad_token=pad_token, mask_token=mask_token, sp_model_kwargs=self.sp_model_kwargs, **kwargs, ) self.vocab_file = vocab_file self.monolingual_vocab_file = monolingual_vocab_file self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs) self.sp_model.Load(str(vocab_file)) # Load the reduced vocab # Keep order of special tokens for backward compatibility self.fairseq_tokens_to_ids = {} cnt = 0 for token in [bos_token, pad_token, eos_token, unk_token, sep_token, cls_token]: if str(token) not in self.fairseq_tokens_to_ids: self.fairseq_tokens_to_ids[str(token)] = cnt cnt += 1 with open(monolingual_vocab_file, "r", encoding="utf-8") as f: for line in f.readlines(): token = line.strip().split()[0] self.fairseq_tokens_to_ids[token] = len(self.fairseq_tokens_to_ids) if str(mask_token) not in self.fairseq_tokens_to_ids: self.fairseq_tokens_to_ids[str(mask_token)] = len(self.fairseq_tokens_to_ids) self.fairseq_ids_to_tokens = {v: k for k, v in self.fairseq_tokens_to_ids.items()} def __getstate__(self): state = self.__dict__.copy() state["sp_model"] = None state["sp_model_proto"] = self.sp_model.serialized_model_proto() return state def __setstate__(self, d): self.__dict__ = d # for backward compatibility if not hasattr(self, "sp_model_kwargs"): self.sp_model_kwargs = {} self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs) self.sp_model.LoadFromSerializedProto(self.sp_model_proto) def build_inputs_with_special_tokens( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding 
special tokens. An BARTPho sequence has the following format: - single sequence: `<s> X </s>` - pair of sequences: `<s> A </s></s> B </s>` Args: token_ids_0 (`List[int]`): List of IDs to which the special tokens will be added. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. """ if token_ids_1 is None: return [self.cls_token_id] + token_ids_0 + [self.sep_token_id] cls = [self.cls_token_id] sep = [self.sep_token_id] return cls + token_ids_0 + sep + sep + token_ids_1 + sep def get_special_tokens_mask( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False ) -> List[int]: """ Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer `prepare_for_model` method. Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. already_has_special_tokens (`bool`, *optional*, defaults to `False`): Whether or not the token list is already formatted with special tokens for the model. Returns: `List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token. """ if already_has_special_tokens: return super().get_special_tokens_mask( token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True ) if token_ids_1 is None: return [1] + ([0] * len(token_ids_0)) + [1] return [1] + ([0] * len(token_ids_0)) + [1, 1] + ([0] * len(token_ids_1)) + [1] def create_token_type_ids_from_sequences( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Create a mask from the two sequences passed to be used in a sequence-pair classification task. BARTPho does not make use of token type ids, therefore a list of zeros is returned. 
Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of zeros. """ sep = [self.sep_token_id] cls = [self.cls_token_id] if token_ids_1 is None: return len(cls + token_ids_0 + sep) * [0] return len(cls + token_ids_0 + sep + sep + token_ids_1 + sep) * [0] @property def vocab_size(self): return len(self.fairseq_ids_to_tokens) def get_vocab(self): vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)} vocab.update(self.added_tokens_encoder) return vocab def _tokenize(self, text: str) -> List[str]: return self.sp_model.encode(text, out_type=str) def _convert_token_to_id(self, token): """Converts a token (str) in an id using the vocab.""" if token in self.fairseq_tokens_to_ids: return self.fairseq_tokens_to_ids[token] else: return self.unk_token_id def _convert_id_to_token(self, index): """Converts an index (integer) in a token (str) using the vocab.""" return self.fairseq_ids_to_tokens[index] def convert_tokens_to_string(self, tokens): """Converts a sequence of tokens (strings for sub-words) in a single string.""" out_string = "".join(tokens).replace(SPIECE_UNDERLINE, " ").strip() return out_string def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: if not os.path.isdir(save_directory): logger.error(f"Vocabulary path ({save_directory}) should be a directory") return out_vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) out_monolingual_vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["monolingual_vocab_file"], ) if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file) and os.path.isfile(self.vocab_file): copyfile(self.vocab_file, out_vocab_file) elif not os.path.isfile(self.vocab_file): with open(out_vocab_file, "wb") as fi: 
content_spiece_model = self.sp_model.serialized_model_proto() fi.write(content_spiece_model) if os.path.abspath(self.monolingual_vocab_file) != os.path.abspath( out_monolingual_vocab_file ) and os.path.isfile(self.monolingual_vocab_file): copyfile(self.monolingual_vocab_file, out_monolingual_vocab_file) elif not os.path.isfile(self.monolingual_vocab_file): with open(out_monolingual_vocab_file, "w", encoding="utf-8") as fp: for token in self.fairseq_tokens_to_ids: if token not in self.all_special_tokens: fp.write(f"{str(token)} \n") return out_vocab_file, out_monolingual_vocab_file
2833844911/cy_jsvmp
1,348,349
outsrc/script.js
var shengxu=[];function cbbfuntome(a){var i=shengxu[a.cbb];a.cbb+=1 return i;};var constantPool=["c_f_1","c_f_3","c_f_4",4294967295,"awcbb_yhh_fun2","c_f_2","0","1","2","3","4","5","6","7","8","9","a","b","c","d","e","f","c_f_5","c_f_7","c_f_8","c_f_9","c_f_10","c_f_11","c_f_12",32,"c_f_14","c_f_15",0,"c_f_16",1,"c_f_17",2,"c_f_18",3,"c_f_19",680876936,25,7,389564586,20,12,606105819,15,17,1044525330,10,22,176418897,4,1200080426,5,1473231341,6,45705983,1770035416,8,1958414417,9,42063,1990404162,11,1804603682,40341101,13,1502002290,14,1236535329,165796510,27,1069501632,23,643717713,18,373897302,701558691,38016083,660478335,405537848,568446438,1019803690,187363961,1163531501,1444681467,51403784,1735328473,1926607734,378558,28,2022574463,21,1839030562,16,35309556,1530992060,1272893353,155497632,1094730640,681279174,358537222,722521979,76029189,640364487,421815835,530742520,995338651,198630844,26,1126891415,1416354905,57434055,1700485571,1894986606,1051523,2054922799,1873313359,30611744,1560198380,1309151649,145523070,1120210379,718787259,343485551,"c_f_21","c_f_22","c_f_23",64,24,"charCodeAt","c_f_25","c_f_26","c_f_27","c_f_29","length","c_f_30",1732584193,271733879,1732584194,271733878,"c_f_31","c_f_32","substring","c_f_20","c_f_13","c_f_33","c_f_34",128,55,"c_f_35","(.*?)(.{0,8})$","","toString","match","c_f_36","parseInt","c_f_37","c_f_39","c_f_40","c_f_41","c_f_42","subarray","c_f_24","Uint8Array","c_f_43","c_f_44","c_f_45","c_f_46","c_f_47","c_f_49","c_f_50","c_f_51","c_f_53","c_f_54","c_f_48","join","5d41402abc4b2a76b9719d911017c592","hello","c_f_28","c_f_52","c_f_55","c_f_56",65535,"c_f_57","c_f_58","awcbb_yhh_fun11","undefined","ArrayBuffer","prototype","slice","c_f_60","c_f_61","Math","max","min","c_f_62","c_f_63","byteLength","c_f_64","c_f_59","c_f_65","c_f_66","c_f_67","c_f_68","c_f_69","c_f_70","set","awcbb_yhh_fun14","awcbb_yhh_fun12","c_f_72","[\\u0080-\\uFFFF]","test","encodeURIComponent","unescape","c_f_74","c_f_75","c_f_76","c_f_77","c_f_78","c_f_79","c
_f_81",null,"String","fromCharCode","apply","c_f_83","c_f_84","c_f_85","c_f_86","buffer","c_f_88","c_f_89","c_f_90","c_f_91","substr","push","reset","c_f_92","append","c_f_93","c_f_71","appendBinary","awcbb_yhh_fun21","c_f_94","_buff","_length","c_f_95","c_f_96","_hash","awcbb_yhh_fun22","end","c_f_97","c_f_98","c_f_99","c_f_101","c_f_100","_finish","c_f_102","c_f_87","awcbb_yhh_fun23","awcbb_yhh_fun24","getState","buff","hash","awcbb_yhh_fun25","setState","c_f_103","awcbb_yhh_fun26","destroy","awcbb_yhh_fun27","c_f_104","c_f_105","c_f_106","c_f_107","c_f_108","c_f_109","awcbb_yhh_fun28","c_f_110","c_f_111","hashBinary","awcbb_yhh_fun29","c_f_112","c_f_113","c_f_114","c_f_115","awcbb_yhh_fun30","awcbb_yhh_fun31","c_f_116",true,"c_f_82","c_f_117","c_f_118","c_f_119","awcbb_yhh_fun32","c_f_120","c_f_121","c_f_122","c_f_123","c_f_124","c_f_125","awcbb_yhh_fun33","awcbb_yhh_fun34","call","c_f_126","c_f_80","awcbb_yhh_fun35","c_f_127","c_f_73","awcbb_yhh_fun36","c_f_128","c_f_129","c_f_38","c_f_130","c_f_131","awcbb_yhh_fun37","c_f_6","window","self","document","name","location","customElements","history","locationbar","menubar","personalbar","scrollbars","statusbar","toolbar","status","closed","frames","top","opener","parent","frameElement","navigator","origin","external","screen","innerWidth","innerHeight","scrollX","pageXOffset","scrollY","pageYOffset","visualViewport","screenX","screenY","outerWidth","outerHeight","devicePixelRatio","clientInformation","screenLeft","screenTop","styleMedia","onsearch","isSecureContext","trustedTypes","performance","onappinstalled","onbeforeinstallprompt","crypto","indexedDB","sessionStorage","localStorage","onbeforexrselect","onabort","onbeforeinput","onblur","oncancel","oncanplay","oncanplaythrough","onchange","onclick","onclose","oncontextlost","oncontextmenu","oncontextrestored","oncuechange","ondblclick","ondrag","ondragend","ondragenter","ondragleave","ondragover","ondragstart","ondrop","ondurationchange","onemptied","onended","o
nerror","onfocus","onformdata","oninput","oninvalid","onkeydown","onkeypress","onkeyup","onload","onloadeddata","onloadedmetadata","onloadstart","onmousedown","onmouseenter","onmouseleave","onmousemove","onmouseout","onmouseover","onmouseup","onmousewheel","onpause","onplay","onplaying","onprogress","onratechange","onreset","onresize","onscroll","onsecuritypolicyviolation","onseeked","onseeking","onselect","onslotchange","onstalled","onsubmit","onsuspend","ontimeupdate","ontoggle","onvolumechange","onwaiting","onwebkitanimationend","onwebkitanimationiteration","onwebkitanimationstart","onwebkittransitionend","onwheel","onauxclick","ongotpointercapture","onlostpointercapture","onpointerdown","onpointermove","onpointerrawupdate","onpointerup","onpointercancel","onpointerover","onpointerout","onpointerenter","onpointerleave","onselectstart","onselectionchange","onanimationend","onanimationiteration","onanimationstart","ontransitionrun","ontransitionstart","ontransitionend","ontransitioncancel","onafterprint","onbeforeprint","onbeforeunload","onhashchange","onlanguagechange","onmessage","onmessageerror","onoffline","ononline","onpagehide","onpageshow","onpopstate","onrejectionhandled","onstorage","onunhandledrejection","onunload","crossOriginIsolated","scheduler","cbbopen","alert","atob","blur","btoa","cancelAnimationFrame","cancelIdleCallback","captureEvents","clearInterval","clearTimeout","close","confirm","createImageBitmap","fetch","find","focus","getComputedStyle","getSelection","matchMedia","moveBy","moveTo","open","postMessage","print","prompt","queueMicrotask","releaseEvents","reportError","requestAnimationFrame","requestIdleCallback","resizeBy","resizeTo","scroll","scrollBy","scrollTo","setInterval","setTimeout","stop","structuredClone","webkitCancelAnimationFrame","webkitRequestAnimationFrame","chrome","caches","cookieStore","ondevicemotion","ondeviceorientation","ondeviceorientationabsolute","launchQueue","onbeforematch","onpopuphide","onpopupshow","getScreen
Details","queryLocalFonts","showDirectoryPicker","showOpenFilePicker","showSaveFilePicker","originAgentCluster","navigation","webkitStorageInfo","speechSynthesis","oncontentvisibilityautostatechanged","openDatabase","webkitRequestFileSystem","webkitResolveLocalFileSystemURL","JSCompiler_renameProperty","ShadyCSS","cr","loadTimeData","c_f_132","c_f_134","c_f_136","c_f_135","c_f_137",2048,"c_f_138",55296,224,65536,56320,240,63,31,192,"c_f_146","c_f_133","c_f_141","c_f_145","Uint32Array","c_f_144","DataView","c_f_155","getUint32","c_f_147","c_f_153","awcbb_yhh_fun40","awcbb_yhh_fun41","awcbb_yhh_fun42","awcbb_yhh_fun43","c_f_148","c_f_150","c_f_151","awcbb_yhh_fun44","c_f_149",1518500250,1859775393,1894007588,899497514,"c_f_152",1009589776,"c_f_154","c_f_143",80,30,"pop","unshift","c_f_157","awcbb_yhh_fun45","Array","map","c_f_156","c_f_158","c_f_159","c_f_160","c_f_161","lib","c_f_162","Base","extend","c_f_165","c_f_164","c_f_166","mixIn","init","hasOwnProperty","arguments","$super","awcbb_yhh_fun50","awcbb_yhh_fun49","create","c_f_167","awcbb_yhh_fun51","awcbb_yhh_fun52","c_f_168","for_in_xh_cbb","for_in_xh_cbb_list","c_f_169","awcbb_yhh_fun53","clone","awcbb_yhh_fun54","awcbb_yhh_fun47","c_f_163","WordArray","c_f_171","c_f_172","words","sigBytes","awcbb_yhh_fun55","c_f_173","c_f_197","stringify","awcbb_yhh_fun56","concat","c_f_174","c_f_175","c_f_176","c_f_177","c_f_178","clamp","c_f_181",255,"c_f_180","awcbb_yhh_fun57","c_f_182","c_f_183","ceil","awcbb_yhh_fun58","c_f_184","awcbb_yhh_fun59","random","c_f_185","c_f_186","c_f_189",987654321,"c_f_190","c_f_191",36969,18000,"c_f_192",4294967296,0.5,"awcbb_yhh_fun62","awcbb_yhh_fun61","c_f_187","c_f_193","c_f_194","c_f_195",987654071,"c_f_170","awcbb_yhh_fun60","enc","c_f_196","Hex","c_f_198","c_f_199","c_f_200","c_f_201","c_f_202","c_f_203","awcbb_yhh_fun63","parse","c_f_204","c_f_205","c_f_206","c_f_207","awcbb_yhh_fun64","Latin1","c_f_209","c_f_210","c_f_211","c_f_212","c_f_213","c_f_214","awcbb_yhh_fun65","c_f_215",
"c_f_216","c_f_217","c_f_218","awcbb_yhh_fun66","c_f_208","Utf8","c_f_220","escape","decodeURIComponent","c_f_221","Malformed UTF-8 data","Error","awcbb_yhh_fun67","c_f_222","awcbb_yhh_fun68","c_f_219","BufferedBlockAlgorithm","_data","_nDataBytes","awcbb_yhh_fun69","_append","c_f_224","string","awcbb_yhh_fun70","_process","c_f_225","c_f_226","c_f_227","c_f_228","blockSize","c_f_229","c_f_230","c_f_231","_minBufferSize","c_f_232","c_f_233","c_f_234","_doProcessBlock","splice","c_f_235","awcbb_yhh_fun71","c_f_236","awcbb_yhh_fun72","c_f_223","Hasher","cfg","c_f_238","awcbb_yhh_fun73","_doReset","awcbb_yhh_fun74","update","c_f_239","awcbb_yhh_fun75","finalize","c_f_240","_doFinalize","c_f_241","awcbb_yhh_fun76",512,"_createHelper","c_f_242","c_f_243","c_f_244","awcbb_yhh_fun78","awcbb_yhh_fun77","_createHmacHelper","c_f_245","c_f_246","c_f_247","c_f_248","HMAC","awcbb_yhh_fun80","awcbb_yhh_fun79","c_f_237","algo","awcbb_yhh_fun46","c_f_249","c_f_250","c_f_251","c_f_252","Base64","c_f_254","c_f_255","c_f_256","_map","c_f_257","c_f_258","c_f_259","c_f_260","c_f_261","c_f_262","c_f_263","c_f_264",0.75,"charAt","c_f_265","awcbb_yhh_fun82","c_f_266","c_f_267","c_f_268","_reverseMap","c_f_269","c_f_270","c_f_271","indexOf","c_f_272","c_f_273","awcbb_yhh_fun83","ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=","c_f_253","c_f_274","c_f_275","c_f_276","c_f_277","c_f_278","c_f_279","c_f_280","c_f_281","awcbb_yhh_fun81","c_f_282","c_f_283","c_f_284","c_f_285","c_f_286","c_f_287","c_f_288","c_f_289","sin","abs","awcbb_yhh_fun86","MD5",4023233417,2562383102,"awcbb_yhh_fun87","c_f_291","c_f_292","c_f_293","c_f_294","c_f_295",4278255360,16711935,"c_f_296","c_f_297","c_f_298","c_f_299","c_f_300","c_f_301","c_f_302","c_f_303","c_f_304","c_f_305","c_f_306","c_f_307","c_f_308","c_f_309","c_f_310","c_f_311","c_f_312","c_f_313","c_f_314","c_f_315","c_f_316","c_f_328","c_f_337",19,29,"c_f_346",33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,"c_f_355",49,50,51,52,53,54,56,
57,58,59,60,61,62,"awcbb_yhh_fun88","c_f_317","c_f_318","c_f_319","c_f_320","floor","c_f_321","c_f_322","c_f_323","c_f_324","c_f_325","c_f_326","awcbb_yhh_fun89","c_f_327","awcbb_yhh_fun90","c_f_290","c_f_329","c_f_330","c_f_331","c_f_332","c_f_333","c_f_334","c_f_335","c_f_336","c_f_338","c_f_339","c_f_340","c_f_341","c_f_342","c_f_343","c_f_344","c_f_345","c_f_347","c_f_348","c_f_349","c_f_350","c_f_351","c_f_352","c_f_353","c_f_354","c_f_356","c_f_357","c_f_358","c_f_359","c_f_360","c_f_361","c_f_362","c_f_363","HmacMD5","awcbb_yhh_fun85","c_f_364","c_f_365","c_f_366","c_f_367","c_f_368","c_f_369","EvpKDF","keySize","hasher","iterations","c_f_371","awcbb_yhh_fun96","compute","c_f_372","c_f_373","c_f_374","c_f_375","c_f_376","c_f_377","c_f_378","c_f_379","c_f_380","c_f_381","awcbb_yhh_fun97","c_f_370","c_f_382","c_f_383","c_f_384","awcbb_yhh_fun98","awcbb_yhh_fun95","Cipher","c_f_385","c_f_386","c_f_387","c_f_388","c_f_389","c_f_390","c_f_391","c_f_392","c_f_393","c_f_394","c_f_395","createEncryptor","c_f_397","c_f_398","_ENC_XFORM_MODE","awcbb_yhh_fun100","createDecryptor","c_f_399","c_f_400","_DEC_XFORM_MODE","awcbb_yhh_fun101","c_f_401","c_f_402","c_f_403","_xformMode","_key","awcbb_yhh_fun102","awcbb_yhh_fun103","process","c_f_404","awcbb_yhh_fun104","c_f_405","c_f_406","awcbb_yhh_fun105","ivSize","c_f_408","c_f_482","c_f_505","c_f_409","encrypt","c_f_410","c_f_411","c_f_412","c_f_407","awcbb_yhh_fun109","decrypt","c_f_413","c_f_414","c_f_415","awcbb_yhh_fun110","awcbb_yhh_fun108","awcbb_yhh_fun106","c_f_396","StreamCipher","flush","c_f_417","awcbb_yhh_fun111","c_f_416","mode","c_f_418","BlockCipherMode","c_f_420","c_f_421","Encryptor","awcbb_yhh_fun112","c_f_422","c_f_423","Decryptor","awcbb_yhh_fun113","c_f_424","c_f_425","_cipher","_iv","awcbb_yhh_fun114","c_f_419","CBC","c_f_427","processBlock","c_f_428","c_f_429","c_f_430","c_f_431","c_f_437","encryptBlock","_prevBlock","awcbb_yhh_fun116","c_f_432","c_f_433","c_f_434","c_f_435","c_f_436","decryptBlock","a
wcbb_yhh_fun117","c_f_438","c_f_439","c_f_440","c_f_441","c_f_443","c_f_444","awcbb_yhh_fun115","c_f_426","pad","c_f_445","Pkcs7","c_f_447","c_f_448","c_f_449","c_f_450","c_f_451","c_f_452","c_f_453","c_f_454","awcbb_yhh_fun119","unpad","c_f_455","c_f_456","awcbb_yhh_fun120","c_f_446","BlockCipher","padding","c_f_458","iv","c_f_459","c_f_460","c_f_462","_mode","__creator","awcbb_yhh_fun121","c_f_463","c_f_464","awcbb_yhh_fun122","c_f_465","c_f_467","awcbb_yhh_fun123","c_f_457","CipherParams","c_f_469","awcbb_yhh_fun124","c_f_470","formatter","awcbb_yhh_fun125","c_f_468","format","c_f_471","OpenSSL","c_f_473","ciphertext","c_f_474","salt","c_f_475","c_f_477",1398893684,1701076831,"awcbb_yhh_fun126","c_f_478","c_f_479","c_f_480","c_f_481","awcbb_yhh_fun127","c_f_472","SerializableCipher","c_f_483","c_f_484","c_f_485","c_f_486","c_f_487","c_f_488","c_f_489","key","algorithm","awcbb_yhh_fun128","c_f_490","c_f_491","c_f_492","c_f_493","_parse","c_f_494","awcbb_yhh_fun129","c_f_495","c_f_496","awcbb_yhh_fun130","kdf","c_f_497","execute","c_f_499","c_f_500","c_f_501","c_f_502","c_f_503","c_f_504","awcbb_yhh_fun131","c_f_498","PasswordBasedCipher","c_f_506","c_f_507","c_f_508","c_f_509","c_f_510","c_f_511","awcbb_yhh_fun132","c_f_512","c_f_513","c_f_514","c_f_515","c_f_516","c_f_517","awcbb_yhh_fun133","awcbb_yhh_fun99","userAgent","c_f_518","c_f_519","c_f_520","c_f_521","c_f_522","RC4","c_f_524","c_f_525","c_f_526","_S","c_f_527","c_f_529",256,"c_f_530","c_f_531","c_f_532","c_f_533","_i","_j","awcbb_yhh_fun135","c_f_534","c_f_535","c_f_536","awcbb_yhh_fun136","c_f_523","c_f_537","c_f_538","c_f_539","c_f_540","c_f_541","c_f_542","RC4Drop","drop","c_f_544","awcbb_yhh_fun138","c_f_543","awcbb_yhh_fun134","c_f_546","c_f_547","c_f_549","c_f_550","c_f_551","Object","keys","c_f_552","click","c_f_553","c_f_554","c_f_555","split","c_f_556","c_f_557","c_f_558",1181,"preventDefault",".box","querySelector","c_f_559",".truck","c_f_560","done","classList","contains","animation","remove"
,"x","gsap","--progress","--hx","--bx","--box-s","--box-o","--truck-y","--truck-y-n","y","$e","Date","getTime","now","ak",100,300,"add","duration",0.3,"delay","to",0.4,0.7,0.18,0.92,0.1,1.15,0.2,1.25,"onComplete","c_f_561","c_f_562","c_f_565","c_f_567",96,0.6,"c_f_568","c_f_570","c_f_0","c_f_571","----","c_f_545","c_f_572","c_f_563","c_f_564","c_f_573","c_f_139","c_f_574","c_f_575","XMLHttpRequest","c_f_576","load","awcbb_yhh_fun146","addEventListener","POST","/cbbllq2",false,"sign","tm","data","JSON","send","okk","responseText","c_f_577",".success","querySelectorAll","c_f_578","textContent","验证失败","验证成功","awcbb_yhh_fun145","awcbb_yhh_fun144","timeline",2.4,"ease","power2.in","awcbb_yhh_fun143","awcbb_yhh_fun142","awcbb_yhh_fun141",".truck-button","forEach","c_f_548"];var changlc={"awcbb_yhh_fun0":{"variablePool":{"c_f_0":"awcbb_yhh_fun1","c_f_132":null,"c_f_133":"awcbb_yhh_fun38","c_f_139":"awcbb_yhh_fun39","c_f_158":null,"awcbb_yhh_fun46":"awcbb_yhh_fun46","awcbb_yhh_fun81":"awcbb_yhh_fun81","awcbb_yhh_fun85":"awcbb_yhh_fun85","awcbb_yhh_fun95":"awcbb_yhh_fun95","awcbb_yhh_fun99":"awcbb_yhh_fun99","c_f_518":null,"awcbb_yhh_fun134":"awcbb_yhh_fun134","c_f_545":"awcbb_yhh_fun139","c_f_548":"awcbb_yhh_fun140","awcbb_yhh_fun141":"awcbb_yhh_fun141"},"zhili":[81640,1293,50359,562,55502,1301,88520,777,55502,820,75430,948,57485,978,78686,1187,93302,1225,57485,1296,53520,1337,3410,1334,7580,18832,88520,6,2887,62250,319,73695,81640,320,5901,76874,321,5901,58582,322,76224,75430,323,77207,78686,324,90732,55672,325,37737,55672,326,788,93302,327,38646,75430,328,38646,55502,329,38646,75430,330,96778,70729,331,68901,58582,332,96778,88520,333,47616,63915,334,72179,57485,137,90732,78429,335,55961,44863,336,5901,15183,337,68901,58582,338,72179,93007,339,68901,3410,340,72179,19347,341,15063,78429,342,68901,23895,343,2887,66775,344,788,75430,345,604,55672,346,29298,97196,347,34238,3410,348,73670,54324,349,788,78686,350,5901,23895,351,76224,76874,352,54448,70729,353,96778,57485,354,896
59,36951,355,55961,27705,356,788,50359,357,15063,70729,358,54448,15183,359,59858,23895,360,73695,78686,361,89659,88838,362,96778,27705,363,41006,70729,364,15063,78429,365,73695,97196,366,15063,50359,367,26354,55672,368,37737,76874,369,604,53520,370,29298,76874,371,89659,44863,372,59858,15183,373,34238,93302,374,47616,58582,375,37211,88520,376,68901,78686,377,89659,88520,378,72179,88520,379,5901,50359,380,68901,23895,381,68901,81640,382,72179,27705,383,47616,93007,384,59858,36951,385,59858,27705,386,77207,81640,387,72179,75430,388,29298,93007,389,604,55672,390,41006,76874,391,59858,78686,392,5901,48564,393,604,66775,394,37211,23895,395,29298,63915,396,604,50359,397,76224,58582,398,90732,53520,399,96778,81640,400,2887,55502,401,604,93007,402,41314,88838,403,51022,63915,404,89659,36951,405,77207,93007,406,90732,54324,407,5901,81640,408,77207,50359,409,76224,28548,410,73670,19347,411,89659,62250,412,54448,76874,413,77207,27705,414,55553,88520,415,2887,53520,416,1286,55672,417,76224,78686,418,73670,75430,419,73670,28548,420,55961,58582,421,788,75430,422,788,3410,423,55553,3410,424,73695,55502,425,41006,53520,426,55553,88520,427,788,75430,428,55961,27705,429,604,23895,430,34238,76874,431,41006,58582,432,47616,44863,433,5901,93007,434,51022,55672,435,47616,93302,436,34238,55502,437,1286,93302,438,73695,28548,439,73670,88838,440,604,93007,441,51022,23895,442,1286,15183,443,34238,88838,444,51022,28548,445,604,58582,446,55961,97196,447,1286,48564,448,68901,62250,449,15063,50359,450,77207,48564,451,5901,62250,452,51022,58582,453,55961,93007,454,41314,3410,455,2887,3410,456,788,66775,457,54448,76874,458,26354,36951,459,90732,44863,460,40000,19347,461,41314,55502,462,2887,19347,463,37737,78686,464,34238,66775,465,2887,27705,466,1286,50359,467,15063,15183,468,5901,78686,469,34238,15183,470,15063,55502,471,55553,93302,472,55553,55502,473,55553,54324,474,77207,75430,475,15063,57485,476,2887,55672,477,68901,23895,478,90732,15183,479,5901,76874,480,77207,63915,481,29298,66775,482,342
38,97196,483,54448,88520,484,41006,81640,485,59858,3410,486,26354,76874,487,15063,48564,488,77207,81640,489,76224,93007,490,34238,81640,491,34238,88520,492,51022,93007,493,54448,36951,494,72179,54324,495,2887,48564,496,37737,93007,497,2887,88520,498,788,57485,499,47616,3410,500,68901,58582,501,47616,66775,502,90732,88520,503,47616,54324,504,2887,27705,505,40000,88520,506,59858,19347,507,73695,50359,508,37737,93007,509,5901,66775,510,41314,76874,511,29298,88520,512,77207,70729,513,51022,50359,514,51022,27705,515,73695,55672,516,72179,28548,517,96778,70729,518,77207,55672,519,34238,50359,520,37737,3410,521,59858,55502,522,47616,81640,523,77207,66775,524,76224,53520,525,89659,28548,526,59858,28548,527,55961,57485,528,5901,93302,529,40000,93007,530,1286,75430,531,5901,70729,532,41314,50359,533,51022,44863,534,37737,93302,535,37211,58582,536,59858,28548,537,1286,54324,538,73695,27705,539,55553,81640,540,47616,81640,541,72179,97196,542,68901,15183,543,90732,97196,544,73695,66775,545,5901,64787,1500,546,26815,54324,598,72613,47718,10,88285,55502,195,45936,46989,78429,777,67559,57335,1,30554,81076,598,56594,53520,820,3823,22054,0,16411,48564,195,45936,39485,78686,948,89043,8029,1,64787,76874,978,89447,73059,0,74443,70729,598,48863,44863,602,87326,28548,979,90910,61090,6,91543,97196,1187,72613,7744,0,10760,70729,339,78938,50359,1188,21286,57190,28718,1189,91543,63915,1225,60664,64384,0,30554,81640,1334,89043,62250,1335,44064,54324,321,68107,55502,1321,87117,28030,1,62250,1336,77333,33009,1]},"awcbb_yhh_fun1":{"variablePool":{"c_f_1":null,"c_f_2":null,"awcbb_yhh_fun2":"awcbb_yhh_fun2","c_f_5":null,"c_f_6":"awcbb_yhh_fun3","c_f_13":"awcbb_yhh_fun4","c_f_20":"awcbb_yhh_fun5","c_f_24":"awcbb_yhh_fun6","c_f_28":"awcbb_yhh_fun7","c_f_38":"awcbb_yhh_fun8","c_f_48":"awcbb_yhh_fun9","c_f_52":"awcbb_yhh_fun10","awcbb_yhh_fun11":"awcbb_yhh_fun11","awcbb_yhh_fun12":"awcbb_yhh_fun12","c_f_71":"awcbb_yhh_fun15","c_f_73":"awcbb_yhh_fun16","c_f_80":"awcbb_yhh_fun17","c_f_82":"awcbb_yhh_fun1
8","c_f_87":"awcbb_yhh_fun19","c_f_92":"awcbb_yhh_fun20","awcbb_yhh_fun21":"awcbb_yhh_fun21","awcbb_yhh_fun22":"awcbb_yhh_fun22","awcbb_yhh_fun23":"awcbb_yhh_fun23","awcbb_yhh_fun24":"awcbb_yhh_fun24","awcbb_yhh_fun25":"awcbb_yhh_fun25","awcbb_yhh_fun26":"awcbb_yhh_fun26","awcbb_yhh_fun27":"awcbb_yhh_fun27","awcbb_yhh_fun28":"awcbb_yhh_fun28","awcbb_yhh_fun29":"awcbb_yhh_fun29","awcbb_yhh_fun30":"awcbb_yhh_fun30","awcbb_yhh_fun31":"awcbb_yhh_fun31","awcbb_yhh_fun32":"awcbb_yhh_fun32","awcbb_yhh_fun33":"awcbb_yhh_fun33","awcbb_yhh_fun34":"awcbb_yhh_fun34","awcbb_yhh_fun35":"awcbb_yhh_fun35","awcbb_yhh_fun36":"awcbb_yhh_fun36","awcbb_yhh_fun37":"awcbb_yhh_fun37"},"zhili":[50359,4,15183,318,93007,147,57485,146,50359,165,88520,181,66775,314,88520,177,28548,182,55672,188,62250,211,55672,243,75430,310,28548,307,88838,292,62250,261,58582,240,66775,245,44863,252,66775,262,57485,263,19347,267,93302,270,48564,272,78429,279,15183,283,76874,288,3410,289,93007,296,28548,303,55502,304,81640,308,88838,311,63915,317,67122,28548,0,9455,94764,48564,4,20489,16411,28718,5,13505,23895,6,26354,57485,7,15063,76874,8,47616,75430,9,37211,93007,10,34238,93007,11,26354,3410,12,788,58582,13,37737,78686,14,68901,55672,15,1286,78686,16,38646,48564,17,73670,15183,18,2887,55672,19,604,55672,20,72179,36951,21,604,79904,94617,22,78686,179,54324,180,14685,23895,181,89447,4349,1,57190,78429,182,88416,64384,1,59553,76923,2,10120,8,38865,44863,5,14685,93007,188,3823,23997,50359,189,35304,15183,190,17824,18714,70624,45823,11,10760,54324,190,87117,81640,191,21286,75430,192,89447,90240,58470,2,14720,6,14685,57485,211,27645,2112,0,74443,3410,240,19676,97196,191,17824,63915,241,1314,44863,245,43015,80220,33245,48564,240,51073,55502,191,89447,75430,244,56594,53520,252,343,71057,38865,63915,240,14453,15183,191,87326,78429,253,33245,48564,262,3823,55151,88184,48564,240,3823,28548,191,67604,88838,239,39485,70729,263,72613,41423,14685,66775,240,27645,50359,191,18442,66775,264,88285,97196,267,89043,22381,33245,193
47,240,343,55502,191,43015,44863,268,47706,93302,270,1101,5191,47706,88520,240,89043,50359,191,18442,23895,271,44064,93007,272,94591,80220,26815,23895,240,20489,75430,191,3823,55502,259,1314,58582,279,78938,42870,96905,58582,240,94591,97196,266,47706,88520,283,18442,5191,74443,97196,240,20489,28548,282,56594,93007,288,89043,42429,38865,81640,240,72613,15183,190,57190,78429,289,43015,55151,1729,23895,240,27645,78686,190,67559,54324,191,87326,66775,241,99240,55672,296,94197,1857,46989,58582,240,87326,15183,190,72613,15183,191,60664,66775,253,39485,44863,303,67559,13141,88184,53520,240,1101,97196,190,90910,93302,191,87117,54324,239,96905,97196,304,54271,42429,88184,57485,240,343,93302,190,87117,48564,191,94591,15183,264,14685,28548,308,45936,46431,38865,78429,240,54271,28548,190,78938,55502,191,343,44863,268,79904,3410,311,88416,23997,75425,58582,240,27645,54324,190,27645,54324,191,87117,55502,271,1729,15183,240,17824,23895,191,67604,75430,271,88416,48373,44064,48564,240,94197,44863,190,78938,78686,191,27645,88520,259,88184,70729,240,18442,93302,191,54271,97196,259,14453,71057,35304,78686,240,1101,62250,190,19676,58582,266,96905,78429,317,19676,27777,1314,55672,240,3823,51705]},"awcbb_yhh_fun2":{"variablePool":{"c_f_3":null,"c_f_4":null},"zhili":[40277,88520,1,70729,2,81364,88838,3,91543,76874,2,21286,1729,93302,1,72613,92857,11372,88290]},"awcbb_yhh_fun3":{"variablePool":{"c_f_7":null,"c_f_8":null,"c_f_9":null,"c_f_10":null,"c_f_11":null,"c_f_12":null},"zhili":[83923,78686,23,63915,24,58582,25,93007,26,27705,27,36951,28,9455,44064,93007,24,46989,27705,24,67559,35304,44863,23,90910,41276,55672,5,87326,16079,2,47706,48564,26,90910,46989,57485,28,48863,44064,44863,5,78938,70959,2,1314,58582,5,54271,8029,2,71057,30554,62250,27,17824,19347,29,42458,99240,81640,24,45936,62360,46989,53520,27,90910,57190,28548,24,51073,60902,89680,41276,27705,25,68107,44064,58582,5,67604,8029,2,74047]},"awcbb_yhh_fun4":{"variablePool":{"c_f_14":null,"c_f_15":null,"c_f_16":null,"c_f_17":null,"
c_f_18":null,"c_f_19":null},"zhili":[7580,55672,30,55672,31,71825,94764,88520,30,20489,78686,32,87117,57190,34739,33,33245,75430,30,20489,54324,34,72613,79904,80153,35,56594,63915,30,21286,50359,36,1101,39485,46696,37,35304,3410,30,68107,23895,38,72613,39485,72245,39,75425,53520,33,44863,32,48564,40,41276,81640,31,43015,55672,32,67559,88285,55502,39,90910,96905,78429,35,89447,76648,11372,67057,75430,37,89447,1314,93302,35,21286,46482,45256,35484,35356,45256,46989,78429,33,78938,93063,31069,39485,15183,33,88520,32,91543,55672,35,68107,19347,41,33245,48564,33,27645,1194,78686,42,10760,78429,33,19676,77661,70675,49904,16370,1857,41276,54324,39,3410,32,54324,43,75425,53520,31,90910,27705,34,51073,30554,36951,37,17824,30554,76874,33,67559,24413,18508,67057,48564,35,87117,99240,93302,33,43015,97927,5972,828,463,9300,30554,58582,39,343,67423,41598,10760,36951,39,75430,32,74443,50359,33,48863,19347,44,64787,3410,39,78938,46748,54324,45,33245,3410,39,94591,16711,75712,82493,92283,46431,64787,44863,37,70729,32,27705,46,30554,66775,31,45936,50359,36,89043,38865,78429,35,87117,35304,78429,39,48863,94292,31572,16411,62250,33,18442,1729,93302,39,43015,2147,30693,67423,67423,81394,31581,88520,37,87326,1987,78041,94764,23895,37,48564,32,88285,15183,39,1101,97196,47,64787,19347,37,45936,52731,62250,48,31581,75430,37,67559,27029,29915,67423,36713,23997,1729,15183,35,15183,32,57485,49,38865,3410,31,68107,15183,38,87117,67057,81640,33,89447,67057,97196,37,19676,34018,18508,64787,28548,39,45936,33245,44863,37,343,18508,67834,71340,1492,30693,88285,53520,35,87326,5505,84829,26815,75430,35,57485,32,10760,88520,37,17824,53520,50,1314,88838,35,90910,25964,36951,51,96905,93302,35,68107,54529,30693,67423,60504,71057,16411,70729,33,53520,32,93007,52,10760,88520,31,51073,23895,53,67559,91543,27705,39,20489,16411,36951,35,72613,94292,36025,16411,44863,37,77333,1729,44863,35,1101,97927,10124,94841,9863,30693,31581,23895,33,67559,9359,97228,35304,15183,33,63915,32,47706,19347,35,67559,70729,41,172
9,3410,33,77333,39406,78686,42,88184,3410,33,67559,60902,29915,34701,2713,41423,26815,55502,39,93302,32,75430,54,75425,27705,31,43015,44863,55,94591,94764,78686,37,67559,33245,66775,33,94197,62678,73822,28602,78429,35,87117,74443,54324,33,48863,73822,70675,14353,9359,67834,41276,54324,39,67604,82493,19293,74443,55672,39,53520,32,1314,66775,33,94591,3410,44,14685,54324,39,94197,1194,55672,45,94764,36951,39,68107,16751,2713,35484,36713,66902,47706,66775,37,88838,32,97196,56,26815,50359,31,343,23895,57,67604,74443,81640,35,20489,75425,48564,39,21286,62836,42053,39485,88838,33,343,28602,78686,39,78938,88559,89680,49988,42458,24974,38865,75430,37,1101,1987,26645,38865,50359,37,76874,32,38865,44863,39,17824,55502,47,41276,23895,37,72613,39860,44863,48,30554,48564,37,343,9764,44649,828,29915,23997,1729,81640,35,3410,32,28548,58,41276,55672,31,90910,75430,42,78938,14685,97196,33,54271,75425,93302,37,43015,38117,31572,39485,50359,39,45936,1314,23895,37,54271,13485,1879,60598,463,36713,26815,88520,35,67604,14353,67240,99240,23895,35,3410,32,10760,70729,37,19676,62250,50,28602,78429,35,60664,96423,50359,51,99240,44863,35,78938,16711,44649,93063,36561,1857,94764,15183,33,15183,32,15183,59,67057,88520,31,88416,81640,60,72613,79904,48564,39,78938,57190,75430,35,60664,97472,12713,56594,36951,37,68107,16411,50359,35,343,38823,45256,5715,63317,71766,16411,55672,33,87117,38224,19922,74443,27705,33,93302,32,14685,88520,35,3823,28548,41,1729,50359,33,18442,84586,58582,42,44064,75430,33,67559,91558,6282,63317,30693,12453,1729,53520,39,3410,32,44863,61,26815,48564,31,14453,19347,62,45936,1729,15183,37,54271,31581,57485,33,27645,97472,46230,75425,93302,35,88416,30554,23895,33,77333,14515,71766,70111,44459,60504,31581,55502,39,78938,82493,41598,67057,70729,39,93302,32,10760,66775,33,60664,58582,44,94764,48564,39,68107,59891,62250,45,30554,53520,39,48863,54529,24972,34701,10124,4492,64787,44863,37,78686,32,78686,63,88285,15183,31,94591,70729,50,51073,28602,55672,35,51073,79904,62250,39,1445
3,70297,42053,14685,78429,33,88416,91543,19347,39,19676,12713,67834,94841,95018,44649,39485,78429,37,67559,34701,74158,46989,81640,37,36951,32,16411,78686,39,60664,53520,47,33245,28548,37,60664,94287,88520,48,28602,58582,37,3823,45464,92283,38765,98823,15881,88184,81640,35,50359,32,55502,64,28602,27705,31,87117,55502,65,94591,56594,54324,33,67604,64787,78686,37,3823,85278,97927,14685,55672,39,20489,67057,57485,37,88416,73822,29915,5715,44596,45256,33245,70729,35,343,94841,75112,30554,81640,35,76874,32,35304,88838,37,89447,55672,50,38865,88520,35,3823,39860,78429,51,31581,23895,35,67604,27029,98823,38765,71766,71057,33245,55502,33,28548,32,28548,66,1314,58582,31,54271,53520,45,48863,33245,75430,39,60664,64787,78686,35,89043,49221,46230,41276,44863,37,54271,1729,70729,35,43015,31572,6282,71104,11855,24972,35304,27705,33,94197,70302,59812,10760,27705,33,66775,32,75425,27705,35,72613,3410,41,64787,78429,33,18442,59891,63915,42,75425,54324,33,20489,3428,87519,5715,41325,15881,47706,76874,39,88520,32,93302,67,57190,81640,31,43015,19347,68,67604,1729,88520,37,90910,28602,3410,33,67559,85278,13485,46989,48564,35,20489,26815,66775,33,94591,32236,60504,49988,91706,29915,91543,70729,39,67604,38765,31069,41276,53520,39,70729,32,94764,58582,33,90910,50359,44,46989,58582,39,67604,25960,78429,45,46989,15183,39,14453,9764,36561,11855,71766,78763,31581,3410,37,93007,32,27705,69,46989,15183,31,89043,63915,70,54271,33245,88838,35,51073,46989,36951,39,1101,71383,17078,67057,78686,33,78938,35304,88838,39,94197,42053,71766,70111,41666,24972,88184,88838,37,14453,63317,71652,16411,81640,37,44863,32,94764,50359,39,3823,36951,47,33245,63915,37,88416,59891,97196,48,31581,44863,37,1101,54996,24972,71104,26326,42870,31581,88520,35,53520,32,88838,71,57190,55672,31,54271,53520,47,87326,46989,58582,33,343,96905,54324,37,51073,52164,32236,94764,58582,39,43015,46989,70729,37,1101,88559,29915,17681,17681,60504,99240,54324,35,21286,45493,31069,75425,75430,35,88520,32,1729,57485,37,18442,66775,50,30554
,55672,35,87326,26028,36951,51,41276,19347,35,94197,28616,16370,54331,41325,48373,74443,76874,33,97196,32,78429,72,96905,55672,31,89043,93302,34,88416,10760,36951,39,1101,31163,41276,88838,37,51073,2147,99240,19347,39,77333,64787,63915,35,20489,13485,6282,71340,92472,20948,94764,58582,33,94591,14353,75112,39485,23895,33,3410,32,16411,88520,35,54271,44863,73,79904,81640,33,48863,9683,97196,55,1729,66775,33,90910,42888,75712,5505,87519,66902,67057,54324,39,62250,32,36951,74,31581,54324,31,94591,50359,57,94591,57190,27705,37,51073,60177,74443,53520,35,20489,41244,67057,54324,37,19676,44064,3410,33,67559,17078,16370,828,90439,89680,99240,50359,39,67604,54097,73914,88285,44863,39,50359,32,41276,78686,33,88416,76874,75,75425,88520,39,87117,45243,88520,62,33245,50359,39,94591,45464,1879,49988,55837,27777,14685,93007,37,88838,32,27705,76,1729,58582,31,77333,15183,65,87117,33245,53520,35,45936,16264,41276,44863,33,87326,78764,33245,70729,35,48863,1314,75430,39,1101,97927,20948,34701,54097,36713,31581,53520,37,48863,82493,75286,56594,57485,37,70729,32,44064,54324,39,17824,81640,77,39485,27705,37,78938,58619,58582,70,96905,62250,37,94591,45227,81394,5715,71255,42429,44064,78429,35,36951,32,19347,78,30554,93007,31,43015,19347,32,67604,99240,78686,33,67604,13939,26815,36951,39,21286,93769,44064,57485,33,87326,74443,97196,37,88416,2147,87519,17681,87391,2713,31581,55502,35,90910,70111,84829,41276,15183,35,55502,32,39485,62250,37,94197,76874,45,75425,15183,35,68107,94287,15183,44,10760,48564,35,89043,91558,87519,38224,6282,74411,57190,88838,33,55672,32,55502,79,75425,70729,31,78938,97196,55,94591,10760,58582,39,78938,24657,88285,81640,37,67559,46230,41276,78686,39,77333,67057,44863,35,27645,2147,36713,49988,92472,24974,91543,58582,33,45936,92857,30099,14685,62250,33,23895,32,41276,54324,35,60664,93007,73,44064,50359,33,27645,2128,88838,55,88285,48564,33,17824,28616,41325,92857,1879,46431,88184,15183,39,54324,32,50359,80,99240,54324,31,14453,54324,50,89043,1729,76874,37,1101,38117,
38865,19347,35,94591,97927,14685,48564,37,67559,91543,62250,33,343,25269,87519,70111,93063,60504,47706,55502,39,78938,94841,75453,67057,88520,39,88520,32,67057,23895,33,45936,55672,75,14685,36951,39,90910,45243,48564,62,44064,93302,39,17824,27691,60504,93006,41325,5932,79904,88520,37,63915,32,3410,81,14685,3410,31,94197,88838,47,20489,99240,48564,35,3823,58850,14685,50359,33,60664,17078,57190,15183,35,343,46989,44863,39,14453,51407,24972,38224,32916,2713,39485,53520,37,87117,38765,41598,64787,44863,37,36951,32,26815,54324,39,19676,66775,77,46989,19347,37,27645,14809,93302,70,56594,55672,37,60664,93694,5972,60598,70675,80220,39485,36951,35,28548,32,54324,82,88285,55502,31,77333,44863,53,20489,35304,57485,33,87326,701,28602,88520,39,51073,32236,38865,76874,33,51073,99240,78429,37,20489,12713,29915,35484,2476,2713,46989,63915,35,20489,54097,80829,10760,66775,35,44863,32,88285,76874,37,19676,78429,45,1729,97196,35,90910,58619,3410,44,28602,78429,35,343,9764,67834,67423,75712,46431,74443,93007,33,88520,32,78429,83,47706,93007,31,27645,88838,62,1101,75425,70729,39,1101,43154,47706,44863,37,45936,88559,1314,36951,39,45936,74443,44863,35,343,38823,26326,60598,35484,29915,79904,70729,33,51073,93063,26645,10760,93007,33,44863,32,99240,55672,35,1101,70729,73,28602,53520,33,54271,66005,78686,55,14685,50359,33,88416,54529,55837,67423,41325,13606,35304,88520,39,3410,32,27705,84,74443,19347,31,3823,63915,70,94197,47706,55502,37,60664,67635,28602,55672,35,89447,41244,28602,53520,37,89043,33245,55672,33,20489,11372,70675,63317,42458,41325,94764,44863,39,89447,5505,77074,26815,55672,39,55502,32,75425,44863,33,94197,58582,75,88184,23895,39,88416,13157,23895,62,35304,28548,39,48863,9764,71255,54097,70675,46431,10760,81640,37,78686,32,75430,85,47706,28548,31,21286,88838,38,78938,33245,15183,35,90910,58850,41276,70729,33,343,97927,38865,50359,35,77333,1729,55502,39,51073,38823,5972,93063,21386,2713,38865,78686,37,48863,49904,46358,16411,44863,37,88520,32,75425,48564,39,87326,81640,77,670
57,78429,37,14453,39406,66775,70,1729,44863,37,19676,91558,89680,60598,81394,12453,39485,70729,35,55672,32,97196,86,41276,93007,31,343,15183,60,77333,64787,81640,33,72613,43154,91543,36951,39,1101,65843,14685,93302,33,1101,79904,58582,37,67604,73822,36713,45493,17681,44649,91543,76874,35,20489,38224,30099,67057,70729,35,53520,32,1729,44863,37,54271,58582,45,38865,27705,35,87326,77890,23895,44,33245,58582,35,68107,64179,44649,71104,71255,12453,88184,97196,33,53520,32,27705,87,88184,23895,31,72613,75430,68,72613,91543,88838,39,1101,71383,41276,55672,37,51073,97895,91543,76874,39,43015,16411,81640,35,20489,40845,67834,94841,9863,71766,57190,55672,33,77333,54331,46358,38865,78429,33,93007,32,31581,75430,35,68107,93302,73,94764,28548,33,87117,52731,48564,55,88285,57485,33,87117,54529,92283,5715,29915,1857,16411,15183,39,81640,32,55502,88,14685,54324,31,17824,70729,36,78938,30554,15183,37,43015,60177,79904,81640,35,60664,55281,67057,3410,37,51073,56594,88838,33,3823,51407,55837,67423,24173,24972,10760,19347,39,51073,60598,35275,14685,55672,39,50359,32,74443,78429,33,43015,66775,75,10760,3410,39,19676,77890,19347,62,38865,93007,39,3823,97256,87519,9359,24972,57221,47706,93302,37,53520,32,88838,89,28602,78686,31,87326,88520,42,78938,1314,28548,35,18442,62678,74443,78429,33,87326,40845,10760,76874,35,45936,94764,93302,39,54271,18462,71255,35484,60598,6282,91543,63915,37,67559,71340,30099,41276,88520,37,19347,32,57190,93007,39,20489,97196,77,39485,54324,37,48863,46748,93007,70,91543,23895,37,14453,91558,71766,35484,16370,48910,33245,54324,35,55672,32,55502,90,1729,81640,31,78938,19347,45,89043,46989,53520,33,78938,36568,96905,28548,39,14453,97927,56594,27705,33,68107,47706,62250,37,54271,13485,92283,93063,95018,60504,91543,97196,35,67604,60598,67240,28602,57485,35,15183,32,57190,97196,37,67559,50359,45,64787,15183,35,18442,59891,58582,44,56594,78429,35,18442,64179,89680,35484,36713,57221,79904,53520,33,93007,32,58582,91,38865,78429,31,67559,28548,55,14453,38865,62250,39,90910
,94764,27705,37,48863,44064,36951,35,48863,77828,44203,93063,22875,98823,56594,57485,33,94197,35484,75286,96905,19347,33,44863,32,75425,36951,35,68107,66775,92,41276,50359,33,48863,25960,57485,53,91543,50359,33,18442,54996,6282,38765,89680,78763,94764,44863,39,27705,32,28548,93,31581,88520,31,72613,58582,60,343,16411,62250,37,67559,79904,55672,35,54271,46989,28548,33,343,57338,19896,828,21386,5972,56594,53520,39,48863,82493,19922,28602,93007,39,54324,32,88184,88838,33,90910,88520,94,96905,57485,39,20489,41303,36951,65,96905,70729,39,18442,11499,10124,67423,70675,13606,1729,55672,37,48564,32,81640,95,47706,55672,31,90910,81640,65,45936,14685,48564,35,43015,47706,78429,33,87117,67057,27705,39,88416,78448,34041,828,49904,36561,67057,15183,37,17824,11855,67240,79904,48564,37,23895,32,16411,75430,39,89447,28548,96,91543,78429,37,68107,22385,93302,96,1314,28548,37,45936,4753,36713,63317,75712,78763,16411,27705,35,36951,32,28548,97,39485,58582,31,94197,66775,70,78938,88285,81640,33,68107,96905,55672,39,68107,74443,36951,37,20489,63503,63503,828,9863,9300,91543,66775,35,87117,5505,84829,88184,62250,35,54324,32,26815,62250,37,51073,50359,62,33245,55672,35,94591,25964,66775,75,30554,62250,35,18442,11007,60504,49865,9300,15881,99240,15183,33,88520,32,55672,98,74443,44863,31,60664,58582,34,14453,33245,54324,39,94591,10760,44863,37,3823,46989,81640,35,90910,37894,73693,71104,73043,30693,88285,76874,33,54271,93006,73914,33245,81640,33,93007,32,16411,58582,35,27645,78429,92,88184,3410,33,3823,42991,50359,53,74443,81640,33,89043,27029,5972,49865,75712,23997,88285,55502,39,36951,32,3410,99,1729,54324,31,45936,93302,53,43015,79904,88520,37,94591,35304,27705,35,51073,46989,57485,33,88416,97796,57420,82493,49865,92283,31581,88838,39,20489,17681,74753,44064,27705,39,27705,32,88285,36951,33,19676,70729,94,31581,75430,39,60664,66005,55502,65,57190,57485,39,90910,42888,98823,9359,92283,52926,75425,93007,37,57485,32,75430,100,88184,75430,31,51073,81640,42,87326,91543,3410,35,87117,79904,930
07,33,72613,46989,93007,39,51073,44203,4201,82493,1492,71766,64787,93007,37,45936,54097,97228,96905,78686,37,27705,32,39485,78429,39,67604,63915,96,88285,55672,37,90910,72753,19347,96,38865,3410,37,3823,18385,70675,70111,36713,74411,91543,44863,35,88520,32,76874,101,14685,15183,31,77333,66775,50,51073,35304,50359,33,88416,74443,81640,39,89043,88285,54324,37,17824,33406,57338,45493,90439,44649,14685,93302,35,14453,1987,97228,67057,58582,35,93302,32,64787,62250,37,20489,93007,62,31581,57485,35,45936,2128,19347,75,99240,58582,35,77333,18385,60504,70111,71255,80220,91543,78686,33,44863,32,78429,102,16411,50359,31,343,23895,68,78938,79904,88838,39,45936,79904,15183,37,87117,1729,70729,35,19676,57420,61159,5505,11855,16370,28602,66775,33,51073,54331,75935,30554,55502,33,58582,32,28602,19347,35,45936,88520,92,79904,48564,33,77333,9683,36951,53,99240,57485,33,72613,4728,98823,14353,70675,13606,94764,55672,39,70729,32,58582,103,57190,93007,31,21286,55502,32,14453,28602,63915,37,14453,88184,44863,35,3823,16411,48564,33,72613,34041,16259,45493,44459,45256,88184,63915,39,89043,9359,78041,33245,62250,39,53520,32,1314,28548,33,68107,78429,94,88184,57485,39,20489,58404,58582,65,46989,55672,39,67604,30624,30693,1987,9300,74411,41276,19347,37,3410,32,81640,104,75425,93302,31,21286,55502,38,78938,56594,70729,35,60664,67057,19347,33,20489,88184,88520,39,88416,33406,77828,63317,48965,92283,1314,66775,37,27645,828,20325,75425,75430,37,81640,32,88184,27705,39,343,55672,96,91543,66775,37,1101,45243,88838,96,16411,57485,37,27645,17045,26326,49904,75712,80220,91543,28548,35,76874,32,78686,105,64787,93007,31,89043,75430,57,45936,67057,97196,33,18442,67057,44863,39,20489,64787,53520,37,43015,98963,91698,71340,71104,24974,64787,58582,35,20489,63317,71652,67057,78686,35,55672,32,38865,93007,37,14453,76874,62,99240,15183,35,89447,58619,88838,75,26815,93007,35,343,77661,10124,93006,29915,13606,88285,28548,33,36951,32,75430,106,39485,62250,31,60664,63915,62,67559,64787,57485,39,72613,39485,66775,3
7,3823,75425,78686,35,27645,74905,73693,71340,73043,44649,47706,3410,33,21286,93006,19922,14685,36951,33,81640,32,99240,19347,35,343,78429,92,96905,55502,33,21286,14809,66775,53,35304,44863,33,18442,27691,71766,34701,71766,52926,96905,93302,39,78429,32,55672,107,35304,15183,31,21286,23895,45,77333,35304,36951,37,60664,44064,44863,35,72613,38865,88838,33,14453,93829,91385,54331,62749,36713,26815,44863,39,3823,54097,26645,56594,48564,39,62250,32,16411,27705,33,51073,55672,94,38865,55502,39,45936,9683,15183,65,28602,23895,39,88416,45464,67834,34701,29915,57221,94764,3410,37,54324,32,63915,108,99240,23895,31,45936,88838,47,72613,38865,78686,35,54271,64787,53520,33,17824,1729,53520,39,17824,91698,73693,71340,54097,26326,64787,55502,37,54271,38224,97228,1729,70729,37,66775,32,31581,63915,39,45936,53520,96,88184,93302,37,77333,25964,36951,96,1314,23895,37,94197,27691,98823,17681,5972,80220,64787,78429,35,97196,32,76874,109,88184,93302,31,19676,44863,36,343,28602,76874,33,21286,35304,28548,39,14453,1314,58582,37,94591,19896,73693,63317,44596,98823,74443,66775,35,20489,17681,71652,33245,78429,35,88520,32,99240,15183,37,68107,88838,62,44064,76874,35,54271,96423,36951,75,67057,23895,35,14453,45227,16370,45493,20948,12453,88184,58582,33,28548,32,48564,110,35304,62250,31,67559,63915,32,94591,1314,55502,39,27645,95955,35304,27705,35,3823,75712,26815,58582,37,19676,16259,17681,89925,41325,46989,55672,33,60664,54331,77074,47706,88520,33,88520,32,30554,28548,35,54271,54324,111,99240,78429,33,89447,9276,44863,57,33245,53520,33,77333,28616,60504,63317,6282,52926,26815,88520,39,66775,32,62250,112,33245,63915,31,14453,88520,42,43015,1729,62250,37,18442,60177,30554,88520,33,89043,36713,46989,76874,35,343,63503,67423,5715,70675,91543,62250,39,18442,11855,74214,96905,93302,39,66775,32,67057,93302,33,87117,66775,51,14685,3410,39,78938,39406,55502,50,88285,78429,39,67604,54529,67834,49988,81394,42870,91543,3410,37,19347,32,55502,113,74443,88838,31,87117,57485,70,88416,74443,78429,35,21286,60
177,67057,78686,39,54271,24974,14685,97196,33,43015,44203,54097,21386,6282,94764,50359,37,43015,14353,74158,14685,3410,37,44863,32,67057,48564,39,60664,36951,48,16411,44863,37,17824,9276,78429,47,91543,97196,37,21286,62253,29915,38224,44649,57221,30554,44863,35,27705,32,57485,114,88285,66775,31,67604,78686,55,43015,35304,15183,33,27645,62678,26815,27705,37,17824,55837,44064,78686,39,68107,19896,45493,92472,89680,64787,75430,35,51073,45493,73914,41276,75430,35,27705,32,41276,93302,37,60664,58582,65,31581,3410,35,90910,26028,70729,94,57190,53520,35,17824,6934,36561,71104,6282,66902,28602,93007,33,36951,32,97196,115,75425,78686,31,94197,93302,45,27645,1729,27705,39,14453,58850,56594,48564,35,54271,71255,33245,23895,37,94591,55735,34701,94841,71255,28602,88520,33,89447,1987,19922,56594,28548,33,76874,32,28602,81640,35,68107,70729,111,74443,57485,33,87117,14809,88838,57,28602,36951,33,94591,45464,36561,38224,45256,55151,88285,57485,39,81640,32,66775,116,35304,28548,31,94591,53520,38,89043,56594,88520,37,68107,34018,35304,62250,33,94591,98823,10760,76874,35,17824,97796,17681,77651,92283,14685,66775,39,14453,1987,71652,67057,23895,39,97196,32,56594,44863,33,87117,55672,51,14685,15183,39,14453,96423,81640,50,74443,75430,39,1101,9764,75712,17681,55837,1857,88285,63915,37,36951,32,63915,117,46989,76874,31,51073,19347,50,17824,33245,28548,35,90910,24657,57190,63915,39,17824,36713,1314,55502,33,60664,19896,45493,89925,10124,94764,70729,37,78938,45493,30099,35304,3410,37,23895,32,46989,28548,39,17824,19347,48,79904,44863,37,17824,46748,97196,47,99240,70729,37,17824,27029,70675,92857,36713,20586,99240,58582,35,55672,32,55672,118,31581,76874,31,68107,55502,34,19676,46989,81640,33,77333,13939,38865,81640,37,18442,6282,16411,75430,39,48863,33406,54331,9863,24972,94764,66775,35,18442,38224,67240,64787,70729,35,88520,32,94764,36951,37,87326,97196,65,56594,55672,35,27645,26028,28548,94,35304,81640,35,94591,9764,29915,82493,98823,42429,39485,58582,33,76874,32,44863,119,26815,66775,31,17
824,63915,60,19676,64787,78429,39,72613,43154,1314,63915,35,54271,9300,26815,78429,37,89043,32539,94841,38765,87519,79904,36951,33,3823,54097,73914,33245,63915,33,97196,32,64787,48564,35,54271,3410,111,74443,88520,33,20489,72753,44863,57,35304,88838,33,89043,54529,2713,5715,89680,75672,57190,50359,39,88520,32,55672,120,99240,58582,31,94197,44863,47,3823,26815,76874,37,88416,24657,99240,58582,33,90910,1879,26815,36951,35,1101,57420,45493,19349,24974,26815,78429,39,89447,70302,67240,30554,36951,39,28548,32,46989,88838,33,48863,81640,51,94764,93007,39,3823,22385,44863,50,88184,93302,39,67559,97256,67834,38765,81394,52926,30554,50359,37,88520,32,28548,121,99240,97196,31,60664,19347,57,72613,75425,63915,35,3823,71383,94764,23895,39,90910,71766,33245,54324,33,72613,34041,70111,59533,26326,16411,58582,37,88416,828,31069,79904,88838,37,44863,32,44064,44863,39,72613,81640,48,67057,93007,37,27645,41303,28548,47,39485,66775,37,67604,77661,92283,82493,41325,55151,39485,19347,35,36951,32,93302,122,31581,88838,31,43015,19347,68,51073,10760,15183,33,88416,24413,94764,19347,37,89043,75712,1729,75430,39,343,91385,49988,67423,9300,46989,19347,35,17824,1987,75935,33245,78429,35,19347,32,94764,50359,37,68107,44863,65,33245,75430,35,77333,45243,19347,94,57190,27705,35,17824,32351,98823,38765,9300,1857,10760,81640,33,53520,32,15183,123,30554,78686,31,67604,50359,53,18442,88184,62250,39,54271,52164,46989,53520,35,20489,24974,64787,63915,37,27645,17,38224,35356,5972,75425,70729,33,19676,67423,70007,33245,23895,33,23895,32,67057,57485,35,90910,55672,111,56594,58582,33,43015,41303,76874,57,16411,19347,33,18442,28616,71255,54097,24974,52926,28602,36951,39,50359,32,23895,124,39485,93007,31,87117,54324,65,1101,99240,15183,37,18442,43154,79904,3410,33,17824,41325,10760,75430,35,77333,2798,70302,89925,41325,94764,44863,39,18442,45493,19293,56594,78686,39,62250,32,64787,19347,33,19676,88520,51,94764,62250,39,60664,86028,70729,50,14685,57485,39,343,16751,29915,71340,89680,13141,41276,23895,37,63915
,32,27705,125,44064,55502,31,72613,58582,36,88416,28602,36951,35,3823,95955,67057,88520,39,77333,16370,38865,88520,33,17824,91385,49988,49904,36713,75425,93302,37,51073,34701,74753,56594,48564,37,54324,32,1729,66775,39,77333,97196,48,16411,36951,37,88416,22385,54324,47,96905,53520,37,94591,28616,2713,828,45256,55151,94764,88838,35,70729,32,55502,126,47706,36951,31,87326,28548,62,88416,94764,93302,33,27645,36568,79904,48564,37,89447,30693,35304,97196,39,60664,37894,70111,35356,60504,96905,97196,35,89043,54331,31069,14685,81640,35,97196,32,41276,75430,37,20489,70729,65,28602,48564,35,1101,9276,54324,94,67057,27705,35,67604,11007,36561,60598,55837,75672,57190,44863,30,60664,75430,32,28548,32,56594,58582,30,94197,93302,32,90910,38865,76874,33,67559,5715,45256,71057,30554,81640,30,88416,53520,34,58582,32,47706,55672,30,89043,75430,34,60664,31581,62250,35,68107,93063,44649,71057,56594,36951,30,89043,53520,36,88520,32,96905,70729,30,60664,53520,36,19676,96905,55672,37,78938,60598,55837,12453,39485,78429,30,54271,63915,38,63915,32,46989,28548,30,72613,23895,38,89447,79904,78429,39,94197,35484,9300,1857]},"awcbb_yhh_fun5":{"variablePool":{"c_f_21":null,"c_f_22":null,"c_f_23":null},"zhili":[28319,93007,127,4208,49609,44064,1500,128,75425,62250,129,78686,32,1857,93007,130,74443,36951,129,87117,40257,68909,98,26815,70729,128,343,81640,36,88285,88520,129,67604,46826,23895,131,57485,38,31581,53520,129,3823,5715,47706,75430,127,88416,3410,132,89043,4349,1,91558,55672,96,81640,36,41276,48564,129,43015,93006,46989,54324,127,77333,44863,132,54271,4349,1,60667,93302,60,19347,34,96905,78429,129,90910,49865,88285,70729,127,19676,62250,132,78938,4349,1,54996,96905,27705,129,54271,74443,66775,127,89447,78429,132,87326,8600,1,49904,38224,34701,57221,74443,57485,129,50359,53,79904,58582,129,67559,67423,74214,42071,-107,96905,70729,128,89447,36227]},"awcbb_yhh_fun6":{"variablePool":{"c_f_25":null,"c_f_26":null,"c_f_27":null},"zhili":[96327,93302,133,6103,88750,35304,50671,134,35304,36951,135
,78429,32,89033,63915,130,1314,23895,135,88416,18250,92665,82,39485,58582,134,14453,93007,36,1314,54324,135,67604,76385,55502,131,10760,78429,133,21286,3410,38,16411,93007,135,90910,11855,68107,9764,15183,96,67057,62250,133,67559,19347,36,91543,57485,135,94197,1987,77333,77661,75430,60,67057,44863,133,94197,62250,34,94764,58582,135,87117,828,14453,77661,31581,54324,133,94197,94764,93007,135,21286,67559,49904,63317,14353,5191,91543,57485,135,93007,53,88285,78429,135,21286,49904,87779,99573,-91,75425,55502,134,89043,10701]},"awcbb_yhh_fun7":{"variablePool":{"c_f_29":null,"c_f_30":null,"c_f_31":null,"c_f_32":null,"c_f_33":null,"c_f_34":null,"c_f_35":null,"c_f_36":null,"c_f_37":null},"zhili":[6310,50359,136,47861,38865,97196,136,18442,97196,137,72613,1729,11000,138,81389,88838,139,2887,66775,140,27447,73695,48564,141,77972,604,55502,142,15063,64787,1500,143,26815,88520,144,53520,130,41423,74443,3410,138,48863,38865,36951,144,89447,33585,92665,49,47706,55672,143,27645,15183,130,88184,88838,144,3823,61584,88184,55672,144,343,28602,78429,136,48863,66775,145,54271,87322,2,64787,50359,146,14453,90670,1,91543,75430,147,54271,33310,2,41276,28548,144,54324,130,96905,15183,144,51073,45493,19293,20381,-60,33245,66775,136,93007,130,96905,15183,144,48863,90439,26815,93007,136,27645,55672,145,60664,89716,1,48373,26815,58582,148,67057,75430,136,78938,75430,137,94197,13141,67057,15183,149,58907,93302,32,29298,88520,32,72179,66775,32,96778,63915,32,37737,66775,32,5901,53520,32,54448,55502,32,40000,23895,32,26354,15183,32,788,75430,32,55961,63915,32,1286,88520,32,51022,70729,32,38646,44863,32,68901,50359,32,2887,88520,32,77207,71057,1314,58582,144,70729,32,48373,31581,44863,148,27645,74443,3410,144,45936,9561,36903,62,79904,93302,149,87326,3410,36,35304,28548,144,89043,11170,64787,48564,149,48863,66775,36,35304,23895,144,87326,84082,90910,78429,38,88520,53,30554,53520,144,343,95934,77661,67057,97196,144,51073,88184,75430,136,72613,93302,132,18442,7744,1,60902,75712,97605,10760,53520,144
,23895,34,74443,48564,144,72613,49904,78041,99573,-73,64787,57485,149,14453,97196,36,26815,15183,144,1101,2681,47706,81640,149,3823,50359,36,46989,23895,144,19676,35577,21286,78686,38,48564,53,56594,19347,144,19676,54239,64179,78429,150,16711,67834,19922,23895,151,99240,76874,144,67559,19363,68818,2,23615,53,33245,53520,143,89447,47706,23895,149,17824,94764,27705,147,54271,5044,2,96905,63915,144,88520,32,41423,88520,96,94764,70729,144,89043,69555,24066,24,56594,58582,149,68107,91543,19347,144,43015,55502,32,20586,47706,93302,144,44863,34,57190,88838,144,14453,71104,75453,27026,-33,1729,58582,152,88838,60,41276,19347,138,21286,76145,55151,75425,19347,152,22715,153,154,28548,96,44064,27705,152,72613,93302,155,27645,57335,1,15183,156,1101,9595,1,4492,56594,50359,157,74443,53520,152,87326,88838,36,3823,62250,96,67057,55502,158,48863,33009,2,75672,16411,28548,159,10760,28548,152,94591,78429,34,90910,55502,96,33245,62250,158,88416,17274,2,11286,2,58582,32,48910,33245,23895,149,77333,78686,70,91543,76874,157,78938,1857,39485,28548,149,67559,76874,47,91543,88520,159,78938,55151,47706,63915,143,94197,79904,97196,149,17824,35304,19347,147,45936,76039,2,75425,54324,143,20489,41090]},"awcbb_yhh_fun8":{"variablePool":{"c_f_39":null,"c_f_40":null,"c_f_41":null,"c_f_42":null,"c_f_43":null,"c_f_44":null,"c_f_45":null,"c_f_46":null,"c_f_47":null},"zhili":[80609,23895,160,58382,14685,81640,160,87326,63915,137,54271,75425,60609,161,15569,50359,139,73695,55502,140,34348,29298,63915,141,72456,96778,55672,142,1286,57190,6432,162,1314,88520,163,76874,130,78763,57190,55502,161,54271,79904,75430,163,27645,23365,95576,49,64787,19347,162,60664,48564,130,99240,27705,163,72613,56335,56594,58582,163,45936,14685,28548,160,14453,54324,164,17824,90670,2,33245,58582,165,19676,28030,1,31581,93007,147,89043,64384,2,88285,48564,163,76874,130,41276,23895,163,343,71104,71652,66711,-60,99240,55502,160,31581,44863,161,94591,53520,130,96905,28548,163,67604,9863,19381,58811,10,70729,32,26815,78686,166,87326,
3171,1,66711,16,55672,130,14685,75430,163,78938,9863,64787,15183,160,17824,81640,164,19676,4349,1,46431,64787,63915,167,33245,63915,160,72613,88520,137,20489,5932,28602,23895,168,47005,27705,32,41006,58582,32,89659,88838,32,77207,58582,32,59858,19347,32,89659,88520,32,90732,58582,32,2887,58582,32,788,50359,32,38646,48564,32,73695,53520,32,73695,78686,32,1286,15183,32,1286,81640,32,604,88520,32,55961,75430,32,54448,57221,30554,57485,163,62250,32,12453,57190,58582,167,94197,30554,78686,163,94197,40257,40777,58,38865,63915,168,67559,53520,36,41276,55672,163,94591,76385,94764,3410,168,343,81640,36,10760,93007,163,48863,4205,94591,88838,38,57485,53,96905,36951,163,67604,95934,91558,57190,3410,160,18442,38865,62250,163,27645,343,42888,60504,41598,46989,78686,163,50359,34,28602,54324,163,89043,38224,31069,14321,-69,57190,81640,168,1101,88838,36,44064,28548,163,94591,2681,67057,55672,168,18442,28548,36,46989,53520,163,67559,84082,21286,53520,38,55672,53,26815,54324,163,87326,44492,45464,50359,150,3428,67834,84829,62250,151,94764,23895,163,60664,2362,11792,2,46546,53,38865,62250,162,43015,88184,78686,168,45936,88184,54324,147,27645,65146,2,35304,97196,163,48564,32,89033,48564,96,74443,58582,163,20489,83457,36903,24,30554,3410,168,67559,30554,62250,163,27645,81640,32,80220,38865,70729,163,15183,34,10760,78429,163,1101,34701,78041,76624,-33,44064,76874,169,70729,60,31581,78429,161,43015,5996,57221,35304,97196,169,23352,153,154,81640,96,41276,88520,169,90910,76874,155,343,85708,1,97196,156,67559,65146,1,1857,79904,66775,170,35304,55502,169,90910,66775,36,88416,81640,96,47706,93007,158,51073,70959,2,66902,14685,93007,171,94764,78429,169,18442,36951,34,20489,48564,96,14685,36951,158,3823,17274,2,3888,2,3410,32,5932,74443,28548,168,87326,55672,70,75425,78429,170,67559,12453,16411,15183,168,77333,97196,47,96905,70729,171,3823,5932,56594,19347,162,17824,26815,36951,168,14453,35304,58582,147,67604,50136,2,31581,19347,162,94591,22218]},"awcbb_yhh_fun9":{"variablePool":{"c_f_49":null,"
c_f_50":null,"c_f_51":null},"zhili":[68986,53520,172,25186,15183,154,75425,46696,173,79904,88520,174,88838,32,84250,23895,53,41276,97196,174,60664,83752,29878,66,91543,3410,173,30554,27705,22,87117,23895,47,44863,60,75425,97196,174,27645,40036,39485,15183,172,17824,14261,32367,1101,94764,70729,22,60664,78429,47,97196,53,23895,60,16411,76874,174,343,95,70302,47706,23895,172,78938,71438,38823,89447,70111,67057,70729,173,68107,82493,75935,35304,19347,174,63915,34,99240,48564,174,18442,14353,87779,83833,-75,99240,54324,173,90910,13256]},"awcbb_yhh_fun10":{"variablePool":{"c_f_53":null,"c_f_54":null},"zhili":[96121,62250,175,47861,31581,54324,176,70729,32,27777,14685,75430,175,89447,53520,137,67604,10760,53520,176,87326,79494,99910,37,47706,3410,175,67604,64787,75430,176,94197,75425,93302,175,19676,79904,15183,176,43015,60664,88184,93007,177,78938,57335,1,55151,41276,23895,176,58582,34,46989,97196,176,19676,5505,41598,35343,-51,55502,154,39485,76874,175,60664,55672,178,88416,60919,1,24227]},"awcbb_yhh_fun11":{"variablePool":{"c_f_55":null,"c_f_56":null,"c_f_57":null,"c_f_58":null},"zhili":[11824,36951,183,36951,184,25186,55502,185,31581,58582,184,45936,12713,48564,185,99240,93007,183,1101,36025,92857,74443,6432,186,78429,96,79904,76874,186,67604,84082,78429,96,47706,97196,184,3823,71478,78686,96,96905,75430,183,89043,11170,828,14353,91543,49575,187,75430,185,99240,23895,186,67559,65843,55672,96,1314,88520,187,67604,77661,2713,2694]},"awcbb_yhh_fun12":{"variablePool":{"c_f_59":"awcbb_yhh_fun13","awcbb_yhh_fun14":"awcbb_yhh_fun14"},"zhili":[81640,202,76874,210,42801,6103,99240,97196,190,43015,76874,191,68107,3410,192,46989,93007,210,54271,13606]},"awcbb_yhh_fun13":{"variablePool":{"c_f_60":null,"c_f_61":null},"zhili":[96121,55502,193,55672,194,96868,74443,63915,193,81640,32,33245,93007,193,19676,6282,36368,2,44863,32,84250,75430,32,39485,3410,193,14453,60270,58731,2,83833,21,99240,62250,194,54271,39485,3410,193,48863,1987,88838,32,96905,55502,195,88416,93302,196,27645,7603
9,2,71787,96905,48564,193,67559,1314,28548,194,3823,88184,63915,195,1101,63915,197,45936,87322,2,48113]},"awcbb_yhh_fun14":{"variablePool":{"c_f_62":null,"c_f_63":null,"c_f_64":null,"c_f_65":null,"c_f_66":null,"c_f_67":null,"c_f_68":null,"c_f_69":null,"c_f_70":null},"zhili":[96121,93302,198,15183,199,4208,2580,78686,200,18442,14685,80153,201,44064,66775,198,43015,38865,93007,201,48863,38865,54324,202,60664,32878,2,33245,80153,203,74443,66775,201,1101,79904,6432,204,75425,50359,0,19676,26815,81640,199,43015,63488,70256,2,45668,18,28602,88520,204,44064,50359,199,48863,94764,70729,201,20489,26815,88520,202,67604,20807,2,80220,94764,93007,204,89043,38865,63915,203,89043,65966,58731,2,54703,9,76874,32,88285,70729,190,89043,82409,1,44119,26815,55672,205,88285,70729,203,67559,96905,88520,204,87326,73043,4492,26815,55502,206,16411,28548,205,51073,67057,93302,190,45936,5109,1,27777,35304,55502,207,33245,70729,206,94197,30554,57485,166,45936,32994,1,80220,35304,55672,208,49831,26815,44863,203,87117,46989,93007,205,77333,1314,36951,166,72613,19820,3,48910,88285,3410,208,94197,46989,57485,207,51073,19347,209,60664,64384,1,1314,44863,206,45936,71787]},"awcbb_yhh_fun15":{"variablePool":{"c_f_72":null},"zhili":[16464,53520,212,40793,99240,27705,212,89447,27940,213,154,48564,214,48863,73059,1,68818,2,46546,20,10760,19347,212,88285,55502,212,20489,56594,62250,215,67559,90670,1,10760,19347,216,88416,2112,1,71057,28602,70729,212,343,78598]},"awcbb_yhh_fun16":{"variablePool":{"c_f_74":null,"c_f_75":null,"c_f_76":null,"c_f_77":null,"c_f_78":null,"c_f_79":null},"zhili":[4852,3410,217,75430,218,4208,88285,54324,217,17824,54324,137,27645,99240,80153,219,64787,23895,219,21286,88184,15183,190,51073,3171,1,56594,61626,220,99240,78429,220,72613,91543,97196,166,43015,40103,1,16411,61626,221,79904,58582,222,93302,32,71057,56594,78686,219,1101,47706,75430,222,67559,38580,99910,35,1729,78429,221,19676,44064,75430,222,48863,79904,63915,222,48863,91543,78686,217,3823,3410,132,88416,8600,1,80220,3530
4,97196,222,93007,34,35304,54324,222,72613,60598,77074,91081,-46,46989,50359,218,18442,32715,6,1314,50359,220,45936,27026,4,94764,70729,221,18442,41090]},"awcbb_yhh_fun17":{"variablePool":{"c_f_81":null},"zhili":[96121,19347,223,97348,57485,224,10760,55502,223,19676,79904,44863,166,67559,19820,1,10760,76874,225,45936,81640,226,77333,88838,227,54271,7744,2,71787]},"awcbb_yhh_fun18":{"variablePool":{"c_f_83":null,"c_f_84":null,"c_f_85":null,"c_f_86":null},"zhili":[30657,55502,228,62250,229,88520,230,82018,88285,62250,229,67559,70729,200,45936,31581,57485,228,90910,58582,200,88416,49865,33245,88838,166,27645,96730,1,88184,5503,231,35304,78429,228,43015,99240,3410,166,67604,43233,1,74443,28548,231,48863,19347,209,54271,5044,1,41276,78429,229,21286,56594,58582,166,48863,35654,1,91543,55672,228,343,78429,200,94591,28602,78686,231,77333,28548,209,27645,85708,2,94764,54324,230,17824,8500,9,88285,58582,231,72613,19347,232,20489,46546,4,96905,50359,231,14453,2694]},"awcbb_yhh_fun19":{"variablePool":{"c_f_88":null,"c_f_89":null,"c_f_90":null,"c_f_91":null},"zhili":[30906,75430,233,1828,56572,57190,4098,234,47706,57485,233,343,15183,137,78938,57190,11000,235,28602,97196,236,36951,32,89033,88520,34,75425,44863,235,48863,87391,46989,19347,236,27645,60270,59823,45,41276,48564,236,90910,78686,36,30554,75430,233,54271,44863,237,20489,85708,2,76874,96,46989,66775,158,67604,76039,2,1729,28548,234,51073,54324,238,48863,85708,1,33245,3410,236,93302,36,16411,55502,236,54271,49988,41598,99573,-59,33245,75430,225,20489,31581,50359,234,78938,67057,54324,225,90910,55502,226,78938,27705,227,343,65146,2,22218]},"awcbb_yhh_fun20":{"variablePool":{},"zhili":[55069,10020,17441,44863,239,89043,8029,0]},"awcbb_yhh_fun21":{"variablePool":{"c_f_93":null},"zhili":[89136,93302,242,35731,10760,19347,242,21286,1729,97196,243,19676,27229,1,84563,57485,244,90910,60919,1,72804,48583]},"awcbb_yhh_fun22":{"variablePool":{"c_f_94":null,"c_f_95":null,"c_f_96":null},"zhili":[89136,27705,246,81364,86011,78429,247
,35304,55502,246,54271,20167,57485,247,94197,38765,74753,95729,57485,248,1729,88838,246,87117,78429,137,1101,62423,44863,248,94197,70111,71652,48012,58582,247,3823,88520,137,1101,94764,1500,249,99240,78429,250,55502,130,12453,30554,62250,249,3823,99240,50359,250,68107,80736,95576,49,29904,66775,251,67604,28548,130,67057,19347,250,43015,9863,64787,48564,250,17824,43847,76874,247,54271,70729,145,77333,64384,2,16411,28548,146,87326,4349,1,35304,15183,147,3823,32878,2,75425,28548,250,75430,130,99240,23895,250,48863,1987,74753,18790,-60,965,19347,247,19347,130,1729,19347,250,18442,42458,19046,36951,247,48863,54324,145,94197,8029,1,5191,48012,88290]},"awcbb_yhh_fun23":{"variablePool":{"c_f_97":null,"c_f_98":null,"c_f_99":null,"c_f_100":null,"c_f_101":null,"c_f_102":null},"zhili":[30657,19347,254,77916,43847,55672,247,43015,28602,38571,255,26815,70729,255,48863,19347,137,20489,99240,46696,256,94341,88520,32,788,58582,32,51022,88520,32,96778,53520,32,51022,55672,32,51022,55672,32,1286,57485,32,15063,76874,32,788,88838,32,29298,58582,32,89659,27705,32,40000,55672,32,26354,93302,32,96778,76874,32,55553,62250,32,96778,53520,32,1286,64787,38571,257,30554,75430,258,76874,32,12453,38865,23895,256,94197,35304,3410,258,68107,18250,12049,62,47706,78429,257,51073,88838,36,10760,75430,258,94591,76385,64787,66775,257,45936,3410,36,56594,76874,258,3823,2681,18442,66775,38,3410,53,74443,28548,258,19676,95934,6934,39485,97196,258,3823,28602,57485,255,54271,62250,132,43015,32878,1,27029,98823,35275,14685,93007,258,88838,34,74443,63915,258,78938,82493,46358,66711,-73,88184,88838,257,21286,1314,88520,256,45936,49831,70729,259,90910,28030,2,47706,70729,260,43847,62250,251,89447,38865,44863,182,67559,57335,1,5191,38865,55502,254,20489,10804,2,10120,14,99240,93302,260,99240,88520,260,89043,14685,15183,261,87326,33009,1,80220,99595,44863,239,67559,6248,0,26815,55672,260,87326,69897]},"awcbb_yhh_fun24":{"variablePool":{},"zhili":[40277,1828,25887,57485,247,70729,154,12453,21160,93007,248,75430,32
,89033,88069,23895,251,98986,15183,139,5901,58582,140,81432,40000,78686,141,79005,90732,58582,142,41006,74411,57870,36227]},"awcbb_yhh_fun25":{"variablePool":{},"zhili":[40277,4208,49359,28548,265,48012,81640,247,48863,43833,88520,137,20167,19347,248,94197,81533,54324,266,49831,58582,251,68107,32512,71787]},"awcbb_yhh_fun26":{"variablePool":{"c_f_103":null},"zhili":[80609,28548,269,77916,84563,57485,247,30554,28548,269,343,78429,265,94591,41423,46362,78686,248,28602,28548,269,67559,88838,137,19676,23997,62423,88838,251,56594,78429,269,94197,55502,266,17824,42429,96137,10701]},"awcbb_yhh_fun27":{"variablePool":{},"zhili":[85496,63158,57870,53520,251,20489,95121,72804,27705,247,77333,99741,49831,50359,248,51073,34170]},"awcbb_yhh_fun28":{"variablePool":{"c_f_104":null,"c_f_105":null,"c_f_106":null,"c_f_107":null,"c_f_108":null,"c_f_109":null},"zhili":[74066,44863,273,70729,274,66522,47706,62250,274,94197,64787,4098,275,88184,81640,273,3823,48564,36,14685,48564,275,94591,8340,38865,63915,273,68107,28548,36,38865,3410,275,67559,72056,54271,19347,38,55672,53,31581,27705,275,21286,55698,3428,57485,150,28616,24974,74158,28548,151,26815,70729,275,51073,95326,11792,2,66711,53,17441,55502,251,87326,74443,28548,273,51073,46989,88520,147,343,89716,2,74443,88838,275,54324,32,1857,78429,96,94764,93302,275,89043,60270,34180,24,38865,88520,273,67604,39485,76874,275,67604,55502,32,66902,88184,55502,275,54324,34,35304,58582,275,54271,5715,59812,13828,-33,74443,81640,276,44863,60,48012,23895,248,72613,51053,27777,96905,93007,276,22607,153,154,93007,96,31581,70729,276,45936,55502,155,94197,22054,1,70729,156,77333,9595,1,42870,31581,97196,277,64787,48564,276,67604,75430,36,343,50359,96,75425,63915,158,67559,33310,2,15881,79904,27705,278,31581,57485,276,19676,78686,34,72613,88838,96,47706,97196,158,94591,6248,2,36368,2,48564,32,22381,38865,93007,273,67559,88520,70,67057,23895,277,89447,13606,79904,28548,273,343,62250,47,41276,23895,278,90910,46431,965,44863,251,19676,56594,88520,273,5107
3,57190,3410,147,87117,16079,2]},"awcbb_yhh_fun29":{"variablePool":{"c_f_110":null,"c_f_111":null},"zhili":[78165,88520,280,93007,281,71825,28602,88520,280,77333,75425,78686,243,90910,76039,1,88184,75430,281,48863,99240,66775,240,18442,28548,282,94197,89716,2,53205]},"awcbb_yhh_fun30":{"variablePool":{"c_f_112":null,"c_f_113":null,"c_f_114":null,"c_f_115":null},"zhili":[40277,54324,284,81640,285,63158,44064,28548,284,78938,67057,93302,181,94197,7744,1,75425,38571,286,30554,62250,286,343,41276,75430,182,89447,50136,1,35304,28718,287,16411,28548,285,19676,3264,6,96905,54324,287,20489,10120,10,64787,57485,287,67559,96905,76874,261,20489,16079,1,87161]},"awcbb_yhh_fun31":{"variablePool":{},"zhili":[68986,9455,7020,15183,239,67559,85708,0]},"awcbb_yhh_fun32":{"variablePool":{"c_f_116":null,"c_f_117":null,"c_f_118":null,"c_f_119":null},"zhili":[89136,88520,290,40793,99595,66775,247,67604,58582,232,21286,31581,57485,290,43015,88838,291,74443,93302,292,18442,27229,3,30554,49575,293,56594,62250,293,90910,88520,137,18442,35304,43178,294,965,44863,248,39485,93302,290,14453,15183,200,72613,21160,76874,248,48863,9359,73914,28602,55502,295,48564,130,15881,79904,3410,294,17824,99240,62250,295,27645,92234,7598,49,79989,70729,251,21286,55672,130,64787,93007,295,90910,35356,31581,50359,295,88416,10760,15183,293,27645,88838,164,51073,5044,2,41276,50359,165,27645,16079,1,88184,50359,147,60664,64384,2,44064,27705,295,93302,130,35304,78686,295,88416,17681,74158,45668,-60,88695,76874,247,56594,62250,294,1101,54324,130,16411,28548,295,17824,24173,61472,63365,10,81640,32,91543,53520,166,43015,90479,1,27026,25,44863,130,46989,50359,295,88416,48965,88184,28548,293,18442,63915,232,87117,75430,192,3823,16079,1,31581,93302,166,87117,94082,1,5191,43847,12237]},"awcbb_yhh_fun33":{"variablePool":{"c_f_120":null,"c_f_121":null,"c_f_122":null,"c_f_123":null,"c_f_124":null,"c_f_125":null},"zhili":[47566,55672,297,51122,31238,70729,247,343,28602,94617,298,67057,36951,298,94197,88838,137,48863,10760,643
2,299,23000,63915,32,38646,62250,32,788,81640,32,96778,53520,32,73670,48564,32,5901,23895,32,38646,78686,32,59858,58582,32,604,23895,32,77207,57485,32,76224,88838,32,29298,15183,32,96778,93007,32,34238,78686,32,37211,97196,32,604,50359,32,96778,44064,11000,300,41276,28548,301,44863,32,5191,33245,15183,299,48863,99240,70729,301,48863,60270,89099,58,39485,81640,300,3823,55502,36,67057,75430,301,3823,8340,1729,58582,300,89043,44863,36,46989,19347,301,45936,7317,19676,15183,38,55502,53,41276,66775,301,77333,97205,93694,57190,36951,298,343,1729,55502,301,94197,54271,4753,89680,47450,28602,57485,301,44863,34,1729,76874,301,18442,5505,84829,88832,-69,41276,55672,300,89043,10760,48564,299,60664,7020,28548,259,343,28030,2,75425,78429,302,965,78686,251,343,31581,44863,182,89447,50136,1,55151,88184,97196,297,3823,8500,2,79509,14,1729,97196,302,1314,3410,302,87326,30554,66775,261,17824,90670,1,66902,49831,88520,239,48863,64384,0,1314,88838,302,18442,22218]},"awcbb_yhh_fun34":{"variablePool":{},"zhili":[27580,35731,46362,66775,247,15183,32,16411,53520,166,54271,14510,1,20586,17441,28548,248,54324,32,23997,88069,54324,251,19393,36951,139,34238,97196,140,81177,38646,81640,141,84019,72179,58582,142,37211,80220,9662,49386]},"awcbb_yhh_fun35":{"variablePool":{"c_f_126":null},"zhili":[83923,46773,79989,56594,54324,240,1101,97196,191,3823,44863,264,67604,23895,305,87117,58620,1,57190,28718,306,30554,70729,306,67559,75430,265,88184,76874,306,17824,36951,265,68107,14685,78429,307,1101,5044,1,42870,44064,62250,306,67559,31848]},"awcbb_yhh_fun36":{"variablePool":{"c_f_127":null},"zhili":[56812,81640,309,81364,14685,23895,309,87117,76874,265,44064,78686,309,94591,53520,265,77333,76874,291,47706,58582,310,60664,50136,2,13606,25887,10760,78686,309,54271,14685,57485,240,67559,50359,191,48863,53520,268,14453,78429,305,78938,70959,2,19327]},"awcbb_yhh_fun37":{"variablePool":{"c_f_128":null,"c_f_129":null,"c_f_130":null,"c_f_131":null},"zhili":[67122,93007,312,19347,313,81364,75425,75430,312,8711
7,28602,62250,166,90910,27755,1,56594,23895,314,20489,87322,1,28602,15314,315,94764,44863,315,45936,44064,53520,182,89043,33310,1,14685,1500,316,33245,88520,313,67559,63365,6,57190,23895,316,68107,11555,10,88285,66775,316,72613,10760,55672,261,343,90670,1,51705]},"awcbb_yhh_fun38":{"variablePool":{"c_f_134":null,"c_f_135":null,"c_f_136":null,"c_f_137":null,"c_f_138":null},"zhili":[74066,88520,547,14126,29293,44064,4098,548,91543,75430,549,93302,32,48373,38865,44863,547,48863,55672,137,87117,10760,55672,549,3823,18250,36903,253,58582,150,46989,19347,550,75425,44863,549,87326,57190,93302,547,94591,28548,132,19676,20807,1,42870,33245,19347,550,78938,38580,8500,201,66775,551,35304,70729,550,51073,84008,1126,158,27705,32,15183,50,1314,3410,552,57485,553,75425,36951,550,88416,28741,4492,75425,76874,552,14453,28796,89610,58811,24,50359,47,57485,45,28602,54324,550,20489,8340,65843,88838,554,93006,41276,62250,548,94197,36951,238,1101,27229,1,18790,77,67057,48564,550,54324,555,76874,556,46989,23895,549,88520,34,30554,78686,549,90910,93006,74753,44064,62250,549,3823,88184,36951,547,20489,28548,132,90910,65146,1,32539,55672,50,56594,48564,552,54271,32351,82493,35484,57221,78686,42,3410,77,56594,50359,550,14453,46826,42125,78429,557,49988,15183,558,53520,45,99240,19347,550,77333,16980,88559,44863,150,67423,88184,54324,548,94197,88520,238,67604,50136,2,55672,558,76874,57,41276,19347,550,94591,7317,40845,81640,150,5505,78686,558,47706,48564,550,17824,12713,44863,150,67423,46989,78429,548,67604,81640,238,14453,27229,2,13828,32,78429,559,55502,57,41276,15183,550,343,98549,42053,23895,560,11855,54324,558,10760,78686,550,51073,31572,81640,150,1987,44064,55672,548,89447,70729,238,78938,33310,2,99573,13,16411,63915,550,68107,79904,28548,548,19676,58582,238,54271,16079,1,67057,93007,549,55672,34,41276,57485,549,19676,35484,67240,20381,-267,14685,55502,548,19676,53205]},"awcbb_yhh_fun39":{"variablePool":{"c_f_146":null,"c_f_141":null,"c_f_155":null,"c_f_143":null,"c_f_144":null,"c_f_145":
null,"c_f_147":null,"c_f_148":null,"awcbb_yhh_fun40":"awcbb_yhh_fun40","awcbb_yhh_fun41":"awcbb_yhh_fun41","awcbb_yhh_fun42":"awcbb_yhh_fun42","awcbb_yhh_fun43":"awcbb_yhh_fun43","c_f_149":null,"awcbb_yhh_fun44":"awcbb_yhh_fun44","c_f_152":null,"c_f_153":null,"c_f_154":null,"c_f_156":null,"awcbb_yhh_fun45":"awcbb_yhh_fun45"},"zhili":[57485,572,36951,573,70729,574,36951,575,15183,579,62250,594,96121,70729,561,74131,1729,76874,561,17824,28602,57485,562,94591,8029,1,57190,70729,166,21286,43233,1,75425,28718,563,88838,96,23895,53,15183,57,19347,60,1729,93007,563,68107,58582,137,77333,67423,86028,11007,54331,99240,83428,564,28548,36,99240,66775,564,18442,6934,31581,93007,166,343,55376,1,75425,60609,561,64787,36951,563,51073,76874,232,54271,33245,55672,166,87326,32994,1,75425,57485,561,67604,28548,209,90910,89716,1,31581,97196,561,56594,75430,561,54271,97196,232,94591,47706,28548,565,87326,56180,1,23997,88285,57485,566,46989,55502,561,343,50359,232,90910,33245,54324,567,87326,43233,1,84250,47706,53520,568,15183,32,78763,91543,75430,564,78938,99240,54324,568,343,65205,21557,38,74443,19347,561,87117,88184,50359,568,343,44863,36,38865,93302,568,94197,16751,96905,66775,566,343,66775,569,60664,50136,1,22381,94764,97196,568,53520,34,39485,63915,568,20489,70111,26645,35343,-49,39485,27705,561,89043,27705,36,33245,78686,563,60664,36951,137,78938,46826,39485,54324,561,60664,76874,36,79904,55502,563,51073,36951,137,87117,95390,343,57485,60,55672,38,94764,88838,563,90910,58582,137,67604,97927,76145,66775,131,87391,93302,150,11007,81394,9768,39485,48564,561,94591,3410,34,16411,66775,564,17824,22875,53520,38,30554,44863,563,89043,55502,137,17824,77661,20586,43838,30554,84035,570,22503,28602,15183,572,19676,37211,64787,78686,573,18442,788,38865,36951,574,17824,73695,14685,70729,575,17824,38646,64787,61626,576,74443,15183,579,67604,28602,1500,580,13190,81640,581,38646,58582,582,76224,36951,583,74183,54448,78429,584,72456,76224,38865,21502,585,47005,70729,139,38646,3410,140,60721,77207,5
7485,224,68901,57485,224,37737,62250,586,94364,1286,31581,43178,571,64787,55672,571,89447,48564,36,44064,48564,571,43015,53520,32,14453,97472,55151,99240,88520,571,343,78429,38,14685,93007,571,72613,53520,34,88416,36568,84250,35304,76874,568,70729,32,13606,88184,93007,561,14453,78686,137,51073,57190,15183,568,88416,84008,92665,325,62250,32,30554,55502,571,54271,28548,192,3823,16079,1,94764,72245,587,28602,15183,588,63915,32,20586,55672,589,56594,62250,588,17824,60270,40777,224,46989,57485,570,89043,47706,97196,588,72613,58582,96,14685,70729,588,45936,79494,63365,61,46989,28548,570,67559,57485,96,41276,15183,588,89043,61584,87326,94764,76874,570,90910,88520,70,88184,75430,588,21286,22875,18442,75425,3410,570,14453,54324,60,57190,57485,588,51073,41666,1101,64787,63915,570,72613,28548,38,35304,55672,588,87117,22875,14453,52684,16259,73693,44863,34,39485,58582,580,67604,64384,2,95293,14,46989,54324,561,27645,1729,57485,588,67604,16411,55502,568,18442,54097,18442,20586,99240,55672,566,50359,32,33245,63915,585,88416,54324,32,53520,44,44064,57485,588,1101,23097,9300,90910,39485,27705,570,20489,16411,15183,588,21286,48863,30554,55502,571,94197,88520,53,45936,33245,57485,576,51073,93302,32,78686,44,91543,23895,588,89447,77649,2713,94197,20807,0,16411,76874,571,94197,93007,32,343,55502,55,64787,28548,580,88416,4349,2,14353,93063,14353,49865,6282,80220,46989,36951,571,51073,19347,34,67057,78429,571,48863,57485,34,67604,23895,590,99240,55502,580,67604,7744,2,55151,33245,55502,571,21286,50359,591,89043,7744,0,26815,75430,566,48863,47706,76874,571,72613,62250,592,1101,17274,1,56594,93007,588,63915,34,1729,75430,588,67604,11855,97228,66711,-233,88285,75430,588,88838,32,57221,55502,55,64787,78686,588,77333,61472,93780,44,33245,28548,571,67559,96905,62250,588,20489,15183,32,39485,55672,587,27645,57190,66775,588,45936,78938,75425,93007,571,72613,99240,81640,588,21286,51073,34701,70675,5191,14685,57485,588,23895,34,10760,75430,588,67559,17681,67240,23615,-53,75425,70729,568,55502,96,3
9485,88520,568,68107,49988,20325,83833,-339,67057,93302,566,35304,36951,571,78938,46989,54324,565,14453,90479,1,55502,232,19676,46989,78429,567,60664,92428,1,89033,19347,32,88184,81331,568,97196,55,44064,88520,568,68107,41343,92665,38,94764,76874,571,14453,38865,15183,568,43015,75430,36,47706,53520,568,60664,91558,96905,78686,566,77333,88838,569,77333,87322,1,75672,99240,50359,568,55502,34,46989,53520,568,78938,5715,59812,45668,-47,63915,154,26815,58582,571,94591,67057,50359,565,20489,94082,1,57485,232,89043,99240,36951,166,68107,98065,1,28602,55502,594,94591,14685,58582,595,3823,3410,191,88416,93302,596,68107,55672,305,60664,64384,2,57485,178,48863,33009,1,31581,30842,597,64787,50359,597,1101,51705]},"awcbb_yhh_fun40":{"variablePool":{},"zhili":[96121,71825,44064,53520,571,87117,81640,38,14453,91543,50359,571,48863,28548,34,94197,95955,36025,56594,53520,571,94197,55502,36,87117,88285,78686,571,27645,54324,34,87326,40845,20948,12452]},"awcbb_yhh_fun41":{"variablePool":{},"zhili":[40277,85598,38865,88520,571,72613,78429,38,89043,35304,54324,571,89447,23895,36,18442,44064,44863,571,78938,97196,34,87326,34041,52684,69897]},"awcbb_yhh_fun42":{"variablePool":{},"zhili":[49364,25915,33245,48564,571,45936,78686,38,94591,79904,36951,571,90910,3410,36,90910,18462,99240,55502,571,89447,88838,38,90910,38865,15183,571,20489,76874,34,90910,31572,14685,28548,571,18442,19347,36,72613,1314,54324,571,89447,27705,34,17824,65843,16370,92283,12237]},"awcbb_yhh_fun43":{"variablePool":{},"zhili":[11824,4208,38865,55502,571,77333,66775,38,48863,14685,78686,571,94197,78429,36,3823,10760,3410,571,78938,36951,34,343,44203,74905,19327]},"awcbb_yhh_fun44":{"variablePool":{"c_f_150":null,"c_f_151":null},"zhili":[67122,97196,577,44863,578,25915,1314,78429,578,54271,66775,29,42458,28602,63915,577,48863,39860,44064,44863,578,72613,56594,70729,577,45936,4753,30693,30621]},"awcbb_yhh_fun45":{"variablePool":{"c_f_157":null},"zhili":[30657,63915,593,71825,63915,96,33245,3410,593,89447,78686,155,78938,
16079,1,78429,96,28602,58582,593,27645,43668,96097,4,58582,154,42071,2,54324,6,93063,13256]},"awcbb_yhh_fun46":{"variablePool":{"c_f_159":null,"c_f_160":null,"c_f_161":null,"c_f_162":null,"c_f_163":null,"awcbb_yhh_fun47":"awcbb_yhh_fun47","c_f_170":null,"awcbb_yhh_fun55":"awcbb_yhh_fun55","awcbb_yhh_fun56":"awcbb_yhh_fun56","awcbb_yhh_fun57":"awcbb_yhh_fun57","awcbb_yhh_fun58":"awcbb_yhh_fun58","awcbb_yhh_fun59":"awcbb_yhh_fun59","awcbb_yhh_fun60":"awcbb_yhh_fun60","c_f_196":null,"c_f_197":null,"awcbb_yhh_fun63":"awcbb_yhh_fun63","awcbb_yhh_fun64":"awcbb_yhh_fun64","c_f_208":null,"awcbb_yhh_fun65":"awcbb_yhh_fun65","awcbb_yhh_fun66":"awcbb_yhh_fun66","c_f_219":null,"awcbb_yhh_fun67":"awcbb_yhh_fun67","awcbb_yhh_fun68":"awcbb_yhh_fun68","c_f_223":null,"awcbb_yhh_fun69":"awcbb_yhh_fun69","awcbb_yhh_fun70":"awcbb_yhh_fun70","awcbb_yhh_fun71":"awcbb_yhh_fun71","awcbb_yhh_fun72":"awcbb_yhh_fun72","c_f_237":null,"awcbb_yhh_fun73":"awcbb_yhh_fun73","awcbb_yhh_fun74":"awcbb_yhh_fun74","awcbb_yhh_fun75":"awcbb_yhh_fun75","awcbb_yhh_fun76":"awcbb_yhh_fun76","awcbb_yhh_fun77":"awcbb_yhh_fun77","awcbb_yhh_fun79":"awcbb_yhh_fun79","c_f_248":null},"zhili":[44863,627,27705,634,93302,638,75430,649,88838,653,81640,655,55502,676,19347,686,57485,692,28548,700,27705,705,3410,714,50359,716,81640,721,63915,725,53520,742,63915,744,19347,749,50359,751,50359,754,55502,759,70729,766,75430,774,80609,93007,599,58582,600,35731,87090,30554,11000,601,74443,19347,601,87117,36951,602,36547,71057,10760,55672,601,89447,66775,602,54271,1314,83428,603,39485,23895,603,1101,23895,604,14685,97196,627,17824,22054,0,78763,35304,15183,603,27645,27705,604,343,56594,61626,628,14685,93302,603,67559,88520,629,71593,3410,610,16411,78686,634,19676,920,93302,155,57190,93302,638,77333,32512,76874,639,44064,66775,649,68107,31051,27705,645,99240,63915,653,77333,38023,93302,625,26815,78686,655,343,59324,70729,656,1314,54324,676,94197,32512,79904,70729,628,67604,78686,605,89447,57335,1,75672,1729,48564,603,43015,28548,6
29,45936,28602,80153,675,1314,23895,601,90910,28548,677,36547,1857,64787,93302,601,89447,63915,677,67604,33245,57563,678,99240,76874,678,51073,28548,679,13592,36951,637,57190,55502,686,68107,80217,23895,687,28602,54324,692,343,64342,57221,44064,78686,678,19676,28548,679,27645,38865,34739,636,35304,19347,678,3823,23895,693,45071,23895,637,88184,3410,700,94591,69395,78429,687,14685,81640,705,3823,13534,23997,16411,15183,678,90910,78429,693,48863,28602,88820,706,47706,15183,678,78938,93007,707,57396,78429,637,57190,81640,714,90910,32512,88838,687,57190,48564,716,94197,26647,5191,28602,78686,678,48863,78429,707,77333,94764,65652,717,44064,28548,603,45936,19347,718,23627,97196,239,41276,55502,721,72613,59324,75430,722,74443,44863,725,89447,42433,27705,726,91543,75430,742,51073,90204,55502,625,88285,66775,744,67559,78655,53520,735,62250,32,15713,35304,27705,628,77333,53520,605,21286,87322,1,89033,1729,50359,603,343,23895,718,67559,99240,61626,745,26815,81640,603,48863,36951,746,47792,97196,747,64787,78429,628,20489,93302,605,77333,85708,0,13534,3410,610,16411,44863,749,18442,38491,36951,239,31581,81640,751,89447,81533,57485,752,57190,62250,754,89043,64342,27705,755,56594,93302,759,87326,59324,19347,731,53520,29,93007,760,37199,13534,76874,761,88184,78686,766,48863,42433,48564,767,1729,62250,774,94197,85110,38865,81640,745,89043,15183,605,43015,22054,1,80220,31581,27705,603,19676,62250,746,94197,91543,43178,775,99240,44863,601,3823,36951,776,13592,13606,47706,66775,601,72613,44863,776,78938,16411,21502,771,39485,78429,601,60664,19327]},"awcbb_yhh_fun47":{"variablePool":{"c_f_164":"awcbb_yhh_fun48","awcbb_yhh_fun49":"awcbb_yhh_fun49","awcbb_yhh_fun51":"awcbb_yhh_fun51","awcbb_yhh_fun52":"awcbb_yhh_fun52","awcbb_yhh_fun53":"awcbb_yhh_fun53","awcbb_yhh_fun54":"awcbb_yhh_fun54"},"zhili":[3410,607,75430,615,50359,618,15183,619,78429,624,75430,626,26266,9455,10446,88520,605,35304,70729,615,54271,75055,93302,616,64787,53520,618,94197,83964,3410,610,33245,62250,619,18442,1800,5035
9,609,91543,88838,624,94197,83964,88520,625,44064,50359,626,78938,32512,12237]},"awcbb_yhh_fun48":{"variablePool":{},"zhili":[56812,4458]},"awcbb_yhh_fun49":{"variablePool":{"c_f_165":null,"c_f_166":null,"awcbb_yhh_fun50":"awcbb_yhh_fun50"},"zhili":[81640,614,48796,53520,606,51122,79904,15183,607,14453,23895,191,95101,41423,79904,93302,607,48863,35654,0,79904,4098,608,41276,88838,606,88416,70256,2,79509,13,39485,28548,606,43015,38865,97196,608,67604,28548,609,77333,22054,1,93302,610,99240,55672,608,94197,66775,611,17824,27229,1,58168,83861,12,1314,55502,608,94197,58582,610,67559,88695,55502,610,67559,19345,10804,2,20381,11,30554,28548,608,94197,19347,610,38865,76874,614,60664,22381,1314,57485,608,54271,3410,610,343,70729,191,16411,23895,608,17824,27777,44064,53520,608,94197,93007,613,2580,42870,74443,57485,608,67559,54628]},"awcbb_yhh_fun50":{"variablePool":{},"zhili":[11824,96868,25887,46989,28548,612,87117,39485,54324,608,20489,55502,613,78938,44863,610,60664,75430,227,67604,57335,2]},"awcbb_yhh_fun51":{"variablePool":{"c_f_167":null},"zhili":[67122,66522,31238,55672,605,67604,8600,0,33245,65652,617,26815,58582,617,1101,79904,55672,612,21286,67057,93302,617,21286,97196,610,20489,63915,227,17824,50136,2,88285,15183,617,18442,24227]},"awcbb_yhh_fun52":{"variablePool":{},"zhili":[49364,96868]},"awcbb_yhh_fun53":{"variablePool":{"c_f_168":null,"c_f_169":null},"zhili":[7580,23895,620,25915,30554,55672,620,77333,99819,58582,32,91543,80153,621,35304,50359,621,87326,41276,62250,622,77333,76874,137,18442,49979,81347,50,1314,44863,623,74443,57485,622,87117,64787,88838,621,18442,89043,59812,33245,66775,623,48863,99240,54324,620,27645,3410,611,77333,33009,1,8500,2,45668,15,99595,96905,36951,623,90910,16411,15183,620,68107,10760,54324,623,87117,77333,57221,94764,63674,621,87158,-64,19347,155,41276,36951,620,3823,54324,611,3823,60919,1,96097,2,99573,11,48012,63915,155,79904,36951,620,67559,97196,155,90910,5932]},"awcbb_yhh_fun54":{"variablePool":{},"zhili":[26266,63158,20167,46
362,53520,610,17824,27705,191,18442,81640,605,60664,7744,1,49386]},"awcbb_yhh_fun55":{"variablePool":{"c_f_171":null,"c_f_172":null},"zhili":[56812,48564,630,88838,631,66522,14685,36951,630,31238,66775,632,46989,55672,630,94197,47718,1,22503,20586,74993,78686,632,17824,42429,38865,58582,600,17824,64787,58582,631,45936,25492,8500,16,55210,3410,633,28548,53,26815,58582,630,45936,75430,137,18442,94528,74411,69043,8,19046,19347,633,16411,78686,631,72613,1857]},"awcbb_yhh_fun56":{"variablePool":{"c_f_173":null},"zhili":[85496,3410,635,25915,19046,28602,70729,635,17824,93258,4,28602,57485,636,18442,76874,637,68107,8600,1,69897]},"awcbb_yhh_fun57":{"variablePool":{"c_f_174":null,"c_f_175":null,"c_f_176":null,"c_f_177":null,"c_f_178":null,"c_f_181":null,"c_f_180":null},"zhili":[40277,36951,640,25186,72804,70729,632,87117,35304,80153,641,39485,3410,640,87117,78429,632,3823,38865,84035,642,25887,36951,633,1101,88184,6432,643,44064,3410,640,343,97196,633,94591,14685,28718,644,49831,23895,645,27645,87322,0,3410,53,39485,78686,643,60664,62365,74917,94,78429,185,38865,3410,642,78938,3410,137,18442,8753,51219,22,44064,57485,641,60664,31581,23895,642,89447,28602,3410,641,20489,62250,238,67559,78686,227,89447,9595,2,58949,58,70729,32,67057,72245,646,39485,93302,644,54271,64787,36951,646,94591,69555,25715,42,94764,55502,641,21286,48564,36,10760,97196,646,51073,79904,55502,643,89043,70111,76468,96905,15183,642,87326,48564,36,75425,88520,646,1101,14809,87326,42870,99240,28548,646,27705,53,99240,75430,646,90910,67423,31069,46546,-53,99573,119,81640,32,75425,50927,646,94764,62250,644,89043,88184,78429,646,87326,2823,24066,103,58582,647,76874,60,78429,53,41276,53520,646,1101,41096,34026,15183,131,84741,57190,93007,642,90910,75430,36,79904,62250,646,17824,9683,89447,22385,78764,74443,65652,648,74443,78686,641,27645,88520,36,94764,50359,646,72613,46989,27705,643,3823,71340,9683,38865,88838,641,60664,66775,36,74443,57485,646,27645,91543,81640,643,1101,67423,96423,88416,78686,60,19347,53,2860
2,93007,646,78938,10760,36951,643,18442,9359,41096,5996,55672,131,44459,75425,53520,648,20489,3428,45256,46358,39485,78686,646,53520,34,74443,88520,646,94197,70302,75935,58949,-114,72804,57485,633,30554,88520,644,78938,84563,58582,633,43015,71340,59812,43847,30621]},"awcbb_yhh_fun58":{"variablePool":{"c_f_182":null,"c_f_183":null},"zhili":[85496,63158,95729,78429,632,88416,46989,4098,650,43847,88838,633,48863,96905,21502,651,96905,97196,650,45936,44863,36,28602,93302,651,90910,14809,57190,27705,650,78938,76874,36,47706,88520,651,17824,41352,89447,78686,60,78686,53,88184,48564,651,45936,19510,94078,15183,29,95018,54324,3,27691,88559,84829,1314,15183,650,1101,53520,137,55502,53,47706,15183,651,14453,51826,10760,58582,599,67604,66775,652,3823,60919,1,20586]},"awcbb_yhh_fun59":{"variablePool":{"c_f_184":null},"zhili":[11824,63158,95101,57190,54324,628,87326,44863,625,43015,70729,305,67604,2112,1,30554,31786,654,67057,93302,654,68107,78686,632,44863,32,21160,75430,632,94197,58582,192,72613,2112,1,42429,1314,53520,654,17824,71787]},"awcbb_yhh_fun60":{"variablePool":{"c_f_185":null,"c_f_186":null,"c_f_187":null,"awcbb_yhh_fun61":"awcbb_yhh_fun61","c_f_193":null,"c_f_194":null,"c_f_195":null},"zhili":[48564,669,68986,88838,657,4208,20093,41276,46696,658,67057,93302,669,1101,75425,49575,670,78429,32,46989,80153,671,81640,32,24666,33245,11000,672,30554,70729,657,87117,88184,57485,671,68107,30541,23295,74,50359,666,16411,76874,672,87326,93258,9,74443,93302,599,72613,58582,656,67559,33310,0,94528,14685,76874,670,3823,90670,1,39485,72245,673,74443,57485,672,62250,674,33245,70729,673,1101,57335,0,96542,48373,57485,32,15183,666,99240,36951,673,88416,57335,0,44838,70675,33245,54324,658,67604,88838,238,54271,87322,1,74443,88838,671,78686,53,1314,55672,671,94591,60598,31069,11949,-85,99240,75430,658,45936,91543,15183,657,77333,1729,88520,675,343,97196,610,78938,40103,2,10701]},"awcbb_yhh_fun61":{"variablePool":{"c_f_189":null,"c_f_190":null,"c_f_191":null,"awcbb_yhh_fun62":"awcbb_yhh
_fun62"},"zhili":[63915,668,6310,23895,659,74131,94764,81640,659,94197,56594,50671,659,62250,660,67057,94617,661,81640,3,1314,88820,662,99240,28548,668,45936,53205]},"awcbb_yhh_fun62":{"variablePool":{"c_f_192":null},"zhili":[4852,40217,10760,97196,661,67057,57485,662,77333,97196,96,39485,62250,661,14453,68839,55672,185,41276,88520,661,77333,51407,97196,663,28902,49988,78764,51541,56594,54324,659,96905,88520,662,88416,23895,96,39485,3410,659,87326,82950,81640,185,39485,54324,659,19676,42125,66775,664,94078,82493,46230,48373,39485,63915,662,90910,57190,55502,659,27645,66775,96,64787,88838,661,72613,27029,9359,78764,28602,31786,665,28602,78429,665,35304,88838,665,94197,55672,666,41125,87779,91543,36951,665,28548,667,96905,78429,665,67604,70111,26645,57485,667,46989,93007,599,87326,78429,656,20489,57335,0,88461,11792,5,62250,34,34348,10120,2,78429,34,57190,36951,665,54271,15376,48113]},"awcbb_yhh_fun63":{"variablePool":{"c_f_198":null,"c_f_199":null,"c_f_200":null,"c_f_201":null,"c_f_202":null,"c_f_203":null},"zhili":[89136,70729,680,25915,31581,3410,680,78938,93302,632,21286,41276,94617,681,1314,44863,680,72613,66775,633,43015,14685,49575,682,36404,1729,80153,683,55502,32,10760,61626,684,75425,97196,682,18442,91543,54324,684,343,29809,81347,91,48564,647,93302,60,76874,53,16411,48564,684,60664,75170,28902,93007,131,463,47706,27705,681,68107,63915,36,1314,54324,684,1101,39406,45936,86028,13485,41276,84035,685,27705,96,55502,53,99240,23895,685,21286,25960,81640,155,27645,58620,1,64787,55672,683,60664,50359,238,87326,50136,1,93007,96,62250,47,67057,93302,685,87326,51407,70729,155,77333,65146,1,1729,88520,683,43015,53520,238,48863,8029,1,33245,3410,684,97196,34,75425,97196,684,67604,71340,67240,69043,-102,55672,154,88184,55672,683,54271,78429,178,94591,22054,1,2694]},"awcbb_yhh_fun64":{"variablePool":{"c_f_204":null,"c_f_205":null,"c_f_206":null,"c_f_207":null},"zhili":[74066,27705,688,74131,38865,76874,688,43015,63915,137,343,41276,60609,689,24489,35304,83428,690,15183,32
,31581,1500,691,91543,88520,689,72613,31581,19347,691,89447,84008,21557,75,46989,23895,690,48863,50359,38,26815,48564,691,87326,9683,94764,88520,690,94591,23895,38,91543,3410,691,54271,58404,60664,93302,53,88520,60,46989,54324,691,343,36411,28902,78429,131,44596,47706,57485,691,54271,62250,36,35304,19347,688,48863,19347,237,43015,89716,2,55672,96,96905,53520,158,77333,87322,2,6934,55837,20325,99240,48564,691,76874,36,57190,58582,691,43015,49904,46358,54703,-86,47706,19347,690,17824,28548,36,41276,54324,689,14453,87788,91543,76874,675,19676,54324,610,87117,40457,2,22218]},"awcbb_yhh_fun65":{"variablePool":{"c_f_209":null,"c_f_210":null,"c_f_211":null,"c_f_212":null,"c_f_213":null,"c_f_214":null},"zhili":[28319,63915,694,77916,96905,66775,694,89447,62250,632,87117,57190,50671,695,16411,55502,694,45936,93007,633,89447,47706,1500,696,81389,31581,57563,697,28548,32,57190,50927,698,56594,50359,696,88416,94764,28548,698,43015,30541,61567,67,23895,647,63915,60,78686,53,74443,28548,698,89447,53519,21144,54324,131,95018,44064,27705,695,45936,28548,36,47706,70729,698,67604,9276,89447,58619,88559,31581,4098,699,56594,88520,699,89447,79904,88838,225,19676,3410,226,51073,89716,1,47706,55672,697,51073,76874,238,94197,16079,1,56594,36951,698,53520,34,91543,76874,698,1101,94841,47450,66711,-78,88520,154,79904,54324,697,1101,50359,178,3823,7744,1,95833]},"awcbb_yhh_fun66":{"variablePool":{"c_f_215":null,"c_f_216":null,"c_f_217":null,"c_f_218":null},"zhili":[68986,78429,701,11653,91543,75430,701,77333,78686,137,72613,57190,94617,702,63390,99240,5503,703,88838,32,30554,50927,704,41276,75430,702,48863,33245,3410,704,94197,60886,12049,68,96905,28548,703,60664,27705,36,28602,58582,704,89043,41303,75425,48564,703,27645,97196,36,1314,28548,704,94591,45243,3823,62250,60,78429,53,56594,28548,704,14453,57182,94528,78686,131,92472,28548,647,28602,19347,704,78938,88184,88838,701,89043,48564,132,48863,70959,1,31572,11499,16370,9768,35304,53520,704,57485,34,75425,3410,704,20489,5505,77074,58949,-7
9,30554,76874,703,45936,74443,50359,702,18442,74443,66775,675,77333,88520,610,67559,92428,2,40332]},"awcbb_yhh_fun67":{"variablePool":{"c_f_220":null,"c_f_221":null},"zhili":[78165,93302,708,50626,18510,27,14,1,16411,58582,708,51073,94764,19347,706,21286,15183,637,54271,33310,1,39485,58582,709,14453,27229,1,31581,23895,710,14453,28030,1,49386,48113,30554,55502,711,85619,27705,712,56594,15183,713,43015,40457,1,13588,12237,12237]},"awcbb_yhh_fun68":{"variablePool":{"c_f_222":null},"zhili":[49364,93302,715,11653,46989,97196,715,68107,88184,66775,215,88416,33310,1,16411,28548,216,89447,2112,1,35304,28548,706,88416,63915,687,1101,7744,1,10701]},"awcbb_yhh_fun69":{"variablePool":{},"zhili":[74066,1644,21160,36951,719,56594,57485,675,343,62250,610,14453,47625,0,13606,95729,54324,720,62250,32,48910]},"awcbb_yhh_fun70":{"variablePool":{"c_f_224":null},"zhili":[85496,76874,723,96868,53520,724,14685,66775,723,89447,86140,12583,96097,2,46546,17,31581,44863,723,74443,63915,723,72613,30554,44863,717,88416,88838,687,343,65146,1,48373,1314,97196,723,67559,25887,48564,719,14453,48564,639,48863,17274,1,49831,81640,720,39485,75430,723,343,48564,633,89043,49831,75430,720,45936,71340,59812]},"awcbb_yhh_fun71":{"variablePool":{"c_f_225":null,"c_f_226":null,"c_f_227":null,"c_f_228":null,"c_f_229":null,"c_f_230":null,"c_f_231":null,"c_f_232":null,"c_f_233":null,"c_f_234":null,"c_f_235":null},"zhili":[40277,76874,727,82018,25887,55502,719,67604,26815,31786,728,39485,63915,728,17824,58582,632,67559,16411,94617,729,88184,66775,728,27645,76874,633,67604,67057,38571,730,62423,93007,731,27645,57190,72245,732,23895,53,88285,97196,732,60664,64845,88285,61626,733,96905,88838,733,60664,99240,53520,730,14453,80835,79904,4098,734,96905,75430,727,54271,74917,29,99240,88838,734,25887,55672,735,21286,88838,32,47706,78686,734,68107,6282,61584,78429,32,33245,58582,599,21286,55502,196,51073,2112,2,57221,14321,17,74443,58582,734,46989,93302,734,1101,41276,93007,599,94591,78686,652,19676,4349,1,12453,26815,66
775,732,67604,30554,50359,734,89043,71145,47706,1500,736,78686,53,74443,48564,736,87117,95,10760,93007,730,54271,10760,28548,599,17824,50359,197,87117,33310,2,74443,84035,737,26815,50359,736,14453,8500,2,79509,82,57485,32,47706,80153,738,1314,55672,736,343,56594,81640,738,20489,22960,68909,29,67057,44863,729,67559,1314,78686,738,43015,55210,78686,739,14453,57335,2,88184,81640,738,30554,58582,732,1101,91543,57485,738,19676,9359,84829,95293,-40,78686,32,75425,44863,736,60664,67057,78686,729,89447,55502,740,19676,8029,2,31581,49575,741,16411,93302,728,45936,75430,633,64787,55672,728,88416,15183,633,77333,96905,23895,737,67604,30478,75453,30554,36951,741,45936,88285,70729,737,17824,88184,55502,675,60664,15183,610,51073,5109,2,22218]},"awcbb_yhh_fun72":{"variablePool":{"c_f_236":null},"zhili":[85496,4208,84563,64787,48564,628,20489,93302,625,20489,97196,305,27645,90670,1,46989,28718,743,14685,27705,743,89043,93007,719,99595,23895,719,60664,81640,625,94591,33009,0,27777,56594,54324,743,72613,44119]},"awcbb_yhh_fun73":{"variablePool":{"c_f_238":null},"zhili":[48796,23895,748,97348,965,55672,747,31581,19347,748,90910,17441,78686,747,67604,81640,605,89447,5044,1,1857,84563,53520,239,19676,33310,0]},"awcbb_yhh_fun74":{"variablePool":{},"zhili":[7580,63158,31238,10760,55672,745,19676,88520,239,72613,53520,305,43015,22054,1,48012,88520,750,14453,87322,0]},"awcbb_yhh_fun75":{"variablePool":{"c_f_239":null},"zhili":[87472,58582,753,14126,88285,53520,753,87117,62423,23895,722,94197,50136,1,84563,53520,726,90910,73059,0,79989,66789]},"awcbb_yhh_fun76":{"variablePool":{"c_f_240":null,"c_f_241":null},"zhili":[56812,97196,756,1644,33245,88520,756,68107,10622,2,66711,10,39485,93302,756,27645,29904,28548,722,67604,73059,1,88069,88520,757,78938,20807,0,35304,61626,758,16411,66775,758,21286,49386]},"awcbb_yhh_fun77":{"variablePool":{"c_f_242":null,"awcbb_yhh_fun78":"awcbb_yhh_fun78"},"zhili":[78429,765,16464,27705,762,81364,35304,15183,765,51073,88290]},"awcbb_yhh_fun78":{"variablePool":{
"c_f_243":null,"c_f_244":null},"zhili":[85032,70729,763,88838,764,46773,16411,70729,763,89447,91543,58582,764,68107,35304,62250,762,43015,3410,610,18442,84365,1,88520,755,88416,87322,1,87161]},"awcbb_yhh_fun79":{"variablePool":{"c_f_245":null,"awcbb_yhh_fun80":"awcbb_yhh_fun80"},"zhili":[88520,773,68986,81640,768,14126,88285,70729,773,1101,49386]},"awcbb_yhh_fun80":{"variablePool":{"c_f_246":null,"c_f_247":null},"zhili":[38452,53520,769,78686,770,1644,57190,66775,769,48863,10760,55502,768,87326,31581,66775,770,14453,88285,88838,771,21286,63915,772,60664,50359,610,343,14510,2,44863,755,43015,20807,1,70202]},"awcbb_yhh_fun81":{"variablePool":{"c_f_249":null,"c_f_250":null,"c_f_251":null,"c_f_252":null,"c_f_253":null,"awcbb_yhh_fun82":"awcbb_yhh_fun82","awcbb_yhh_fun83":"awcbb_yhh_fun83","c_f_273":"awcbb_yhh_fun84"},"zhili":[55502,798,78686,809,75430,808,27580,4458,46989,62250,598,3823,56594,49575,778,56594,36951,778,343,28548,602,20489,64787,34739,779,57190,63915,779,60664,19347,629,14453,1314,88820,780,1729,48564,778,51073,3410,677,3823,64787,4098,781,57190,36951,781,54271,78686,782,52508,93007,637,30554,57485,798,89043,38491,93302,687,47706,19347,809,89043,64342,15183,786,88838,810,69395,55151,30554,53520,781,88416,78429,782,90910,99240,15314,811]},"awcbb_yhh_fun82":{"variablePool":{"c_f_254":null,"c_f_255":null,"c_f_256":null,"c_f_257":null,"c_f_258":null,"c_f_259":null,"c_f_260":null,"c_f_261":null,"c_f_262":null,"c_f_263":null,"c_f_264":null,"c_f_265":null},"zhili":[85496,78429,783,71825,67057,93007,783,48863,27705,632,51073,41276,4098,784,28602,88838,783,72613,57485,633,94591,75425,81331,785,2264,57485,786,48863,30554,11000,787,14685,15183,783,48863,50359,645,19676,89716,0,22505,14685,81076,788,78686,32,14685,5503,789,38865,58582,785,43015,33245,88838,789,17824,79494,29878,226,70729,647,70729,60,36951,53,41276,50359,789,88416,53519,94528,23895,131,92472,64787,81640,784,88416,76874,36,88184,81640,789,43015,96423,94591,72753,78764,1314,11000,790,70729,647,50359,60
,88520,53,44863,34,30554,19347,789,67559,54331,83904,10790,81640,131,2476,33245,55672,784,78938,70729,36,78429,34,1314,93302,789,3823,17681,25964,343,58619,38823,88184,72245,791,36951,647,50359,60,76874,53,70729,36,64787,19347,789,27645,92857,80999,32865,75430,131,62749,74443,57485,784,87326,57485,36,81640,36,79904,19347,789,67559,35484,45243,88416,39860,2147,46989,83428,792,67057,81640,792,90910,53520,60,57190,88838,791,17824,4753,93007,96,46989,54324,790,21286,9764,89680,71766,10760,46696,793,97196,32,30554,31786,794,70729,53,44064,27705,794,68107,29809,29987,17,99240,3410,785,17824,23895,795,16411,23895,794,20489,71145,56594,63915,789,87326,38765,19381,79653,49,3410,558,26815,27705,794,51073,57485,38,48965,76874,57,76145,31581,36951,793,68107,41303,97927,91543,3410,787,72613,81640,796,78938,22054,1,31581,54324,788,67604,57485,238,27645,76039,1,94764,19347,794,36951,34,79904,75430,794,89043,93006,97605,88832,-77,64787,23895,789,78686,38,38865,93302,789,51073,11855,97605,83833,-237,58582,130,46989,23895,787,17824,63915,796,89447,17274,1,88184,84035,797,26815,44863,797,90910,74917,2,27026,27,48564,53,33245,57485,788,20489,55672,137,60664,75992,33222,15,35304,3410,797,89447,1729,63915,788,72613,76874,238,94591,58620,1,69043,-27,78429,154,79904,15183,788,54271,36951,178,54271,89716,1,19327]},"awcbb_yhh_fun83":{"variablePool":{"c_f_266":null,"c_f_267":null,"c_f_268":null,"c_f_269":null,"c_f_270":null,"c_f_271":null,"c_f_272":null},"zhili":[80609,23895,799,63158,44064,88838,799,60664,27705,137,45936,38865,15314,800,25887,78686,786,90910,56594,84035,801,99595,97196,802,67604,88285,34739,803,57190,53520,803,72613,51511,58470,2,79509,67,14685,93007,803,17441,88520,802,80831,22381,17441,81640,802,17824,46431,50359,32,1729,43178,804,46989,93302,801,67604,27705,137,343,44064,88838,804,3823,43668,93268,35,46989,88838,803,94591,56594,57485,804,43015,56594,53520,801,27645,66775,132,51073,9595,1,10760,70729,804,14453,5932,91543,97196,804,63915,34,99240,28548,804,87117,17681,74753
,10120,-49,88520,130,79904,93007,801,89043,48564,796,1101,65146,1,94764,50671,805,64787,48564,805,87326,74917,2,58949,36,28602,78686,805,1101,41276,88520,799,89447,54324,806,48863,22054,1,33245,15314,807,15183,34,52994,46989,88838,807,1101,75964,30014,2,14720,8,1314,23895,800,26815,55502,807,19676,5932,33245,58582,799,20489,1729,55502,800,87326,44064,70729,803,89043,67057,78429,808,94591,57335,3,40332]},"awcbb_yhh_fun84":{"variablePool":{"c_f_274":null,"c_f_275":null,"c_f_276":null,"c_f_277":null,"c_f_278":null,"c_f_279":null,"c_f_280":null,"c_f_281":null},"zhili":[28319,66775,812,63915,813,88838,814,1828,94341,30554,61626,815,93302,32,26815,50671,816,78686,32,39485,46696,817,96905,97196,813,20489,39485,27705,817,68107,65205,21557,153,19347,53,46989,27705,817,51073,36411,63365,2,46546,129,97196,36,55672,53,38865,58582,817,18442,95934,34026,56594,88520,814,67559,76874,34,56594,62250,817,77333,95018,1314,55672,812,67604,78429,132,18442,73059,1,14453,77661,26815,31786,818,44863,36,55502,53,10760,81640,817,90910,62365,95,78686,57,41666,1729,3410,814,27645,79904,54324,817,20489,31581,93007,812,3823,55502,132,89447,33310,1,78938,25960,38865,88820,819,46989,76874,815,67559,27705,36,99240,75430,816,43015,22385,41276,63915,815,94591,81640,36,79904,27705,816,20489,13157,1101,78429,60,66775,53,64787,48564,816,67604,30910,9110,93007,131,59533,35304,53520,819,43015,33245,3410,818,17824,30693,45464,44649,75935,94764,48564,816,54324,34,91543,53520,816,27645,70111,97228,26815,27705,817,23895,34,1314,62250,817,43015,49865,74214,68307,-164,79904,58582,815,87326,30554,66775,816,94197,57190,88520,780,43015,3410,616,67559,33310,2,48583]},"awcbb_yhh_fun85":{"variablePool":{"c_f_282":null,"c_f_283":null,"c_f_284":null,"c_f_285":null,"c_f_286":null,"c_f_287":null,"c_f_288":null,"awcbb_yhh_fun86":"awcbb_yhh_fun86","c_f_290":null,"awcbb_yhh_fun87":"awcbb_yhh_fun87","awcbb_yhh_fun88":"awcbb_yhh_fun88","awcbb_yhh_fun89":"awcbb_yhh_fun89","awcbb_yhh_fun90":"awcbb_yhh_fun90","c_f_328":"awcbb_yhh
_fun91","c_f_337":"awcbb_yhh_fun92","c_f_346":"awcbb_yhh_fun93","c_f_355":"awcbb_yhh_fun94"},"zhili":[58582,831,81640,835,53520,899,57485,911,44863,913,53520,864,44863,865,76874,868,36951,885,68986,53520,821,4208,1314,81640,598,78938,35304,72245,822,44064,81640,822,45936,23895,602,72613,1729,84035,823,44064,48564,823,89447,28548,629,43015,39485,28718,824,67057,3410,823,94591,70729,746,87117,33245,30842,825,30554,50359,822,90910,75430,776,87117,41276,57563,826,24489,74443,49575,827,10760,88520,831,89447,6248,0,91543,93302,826,94591,62250,832,71593,93302,750,41276,93007,835,27645,31051,81640,739,88184,48564,899,67604,43833,76874,757,46989,55672,911,67559,43833,93007,625,38865,19347,913,21286,44215,33245,81640,825,90910,75430,605,88416,85708,1,75672,88184,27705,826,88416,78429,832,87326,96905,21502,914,39485,23895,822,21286,58582,832,94764,27705,914,60664,10760,50359,825,19676,70729,761,89447,65146,1,74411,74443,78429,822,67604,28548,947,46989,19347,914,89447,44064,75430,825,94591,50359,767,27645,2112,1,20586]},"awcbb_yhh_fun86":{"variablePool":{"c_f_289":null},"zhili":[11824,1644,78429,32,1729,38571,828,54324,130,30554,50359,828,89043,61295,21090,53,99240,19347,827,20489,1314,55672,828,21286,55502,32,81640,666,78429,34,26815,78686,828,19676,34701,33245,44863,821,87326,93007,829,67604,28030,1,46989,78429,821,78938,63915,830,67604,33310,1,15376,89680,15881,26815,50359,828,15183,34,39485,75430,828,77333,14353,30099,45668,-62]},"awcbb_yhh_fun87":{"variablePool":{},"zhili":[85032,1644,62423,93302,251,47906,62250,139,1286,78686,833,1286,27705,834,73670,81640,142,34238,88285,54324,824,77333,54324,610,19676,19269,1,42870]},"awcbb_yhh_fun88":{"variablePool":{"c_f_291":null,"c_f_292":null,"c_f_293":null,"c_f_294":null,"c_f_295":null,"c_f_296":null,"c_f_297":null,"c_f_298":null,"c_f_299":null,"c_f_300":null,"c_f_301":null,"c_f_302":null,"c_f_303":null,"c_f_304":null,"c_f_305":null,"c_f_306":null,"c_f_307":null,"c_f_308":null,"c_f_309":null,"c_f_310":null,"c_f_311":null,"c_f_312"
:null,"c_f_313":null,"c_f_314":null,"c_f_315":null,"c_f_316":null},"zhili":[7580,93302,836,88520,837,71825,63915,32,91543,30842,838,53520,96,46989,62250,838,20489,19381,68909,83,96905,44863,838,14453,28602,58582,837,67559,45493,1314,34739,839,39485,93007,836,1101,47706,28548,839,87117,343,56594,57563,840,39485,28548,836,21286,33245,27705,839,45936,28548,841,15183,60,46989,78686,840,78938,77890,78686,131,26815,93302,840,18442,3428,24974,97927,48564,842,3410,131,67057,93302,840,3823,22385,63915,60,38865,62250,840,51073,11007,10124,51407,6282,75672,64787,55502,838,55672,34,26815,93007,838,88416,17681,74214,58949,-92,17441,78686,251,67604,78429,632,72613,94764,49575,843,1314,76874,836,68107,19347,32,88184,54324,837,14453,93006,94591,79904,40830,844,44064,23895,836,43015,53520,34,88285,15183,837,90910,49988,72613,99240,15314,845,31581,62250,836,94197,63915,36,47706,28548,837,68107,34701,67559,28602,15314,846,14685,57485,836,90910,93302,38,56594,53520,837,51073,70111,18442,33245,46696,847,94764,70729,836,90910,76874,53,96905,57485,837,3823,49904,60664,74443,49575,848,1729,55502,836,67559,88520,55,38865,88520,837,43015,9359,21286,31581,80153,849,74443,76874,836,87326,75430,57,75425,93007,837,89043,63317,54271,10760,34739,850,10760,75430,836,68107,70729,42,14685,55672,837,87117,49988,18442,91543,61626,851,75425,53520,836,94591,28548,60,44064,36951,837,14453,92857,89043,94764,57563,852,75425,15183,836,89447,55502,62,47706,57485,837,87326,71104,60664,56594,40830,853,38865,27705,836,17824,53520,50,26815,55672,837,51073,93006,17824,46989,28718,854,33245,53520,836,94197,66775,65,35304,63915,837,27645,71104,51073,64787,40830,855,47706,50359,836,89447,50359,45,31581,53520,837,67604,45493,19676,56594,21502,856,31581,97196,836,78938,50359,68,39485,23895,837,60664,71340,14453,38865,21502,857,44064,66775,836,51073,57485,70,28602,44863,837,94197,54097,343,28602,72245,858,16411,81640,836,88416,88838,47,38865,76874,837,67604,94841,89043,41276,5503,859,33245,58582,843,67559,62250,32,1101,
67057,83428,860,94764,44863,843,77333,78429,34,89043,14685,81076,861,16411,78429,843,27645,88838,36,67604,46989,5503,862,91543,44863,843,1101,27705,38,94591,28602,4098,863,99240,27705,860,28602,23895,860,14453,35304,58582,861,90910,74443,19347,862,21286,31581,63915,863,88416,35304,19347,844,17824,88838,42,88184,63915,827,94197,3410,32,1101,74443,28548,864,94591,90670,7,42870,94764,88520,863,96905,88520,863,18442,41276,53520,860,94197,94764,19347,861,78938,39485,62250,862,68107,38865,15183,845,94197,54324,45,28602,76874,827,43015,36951,34,67604,14685,54324,864,67604,22054,7,12453,47706,44863,862,31581,36951,862,48863,88184,15183,863,20489,41276,81640,860,43015,14685,54324,861,89043,1729,48564,846,54271,27705,48,94764,36951,827,89043,66775,36,17824,99240,76874,864,18442,16079,7,22381,35304,81640,861,64787,63915,861,343,47706,81640,862,14453,79904,57485,863,89447,35304,81640,860,14453,94764,97196,847,1101,28548,51,26815,44863,827,94197,28548,38,3823,74443,54324,864,51073,2112,7,74411,30554,62250,860,31581,53520,860,94591,88285,66775,861,43015,57190,48564,862,19676,91543,53520,863,89447,10760,55672,848,60664,19347,42,46989,93302,827,18442,50359,53,89447,56594,19347,864,67604,90670,7,84250,1314,15183,863,10760,15183,863,87326,67057,93302,860,1101,38865,27705,861,67604,88184,54324,862,21286,75425,54324,849,43015,53520,45,79904,23895,827,87117,28548,55,90910,26815,53520,864,43015,87322,7,89033,75425,63915,862,10760,54324,862,343,35304,3410,863,89447,30554,36951,860,77333,91543,23895,861,51073,88285,55672,850,88416,3410,48,99240,57485,827,87117,93302,57,54271,88285,93302,864,43015,50136,7,15881,94764,62250,861,33245,55672,861,77333,64787,88520,862,19676,88184,78429,863,51073,10760,93302,860,87326,31581,62250,851,14453,78686,51,28602,54324,827,48863,57485,42,20489,64787,97196,864,45936,28030,7,42429,1729,3410,860,46989,58582,860,51073,30554,53520,861,18442,10760,93302,862,89447,64787,66775,863,68107,74443,88838,852,72613,81640,42,38865,57485,827,51073,88838,60,90910,31581,81
640,864,54271,64384,7,15881,75425,53520,863,28602,48564,863,54271,99240,50359,860,68107,94764,27705,861,45936,91543,50359,862,87117,75425,75430,853,45936,50359,45,33245,3410,827,60664,78686,62,45936,47706,15183,864,88416,60919,7,80220,31581,75430,862,33245,70729,862,77333,26815,15183,863,89043,75425,54324,860,60664,57190,54324,861,88416,46989,23895,854,94197,75430,48,28602,76874,827,20489,78686,50,43015,64787,57485,864,17824,90670,7,1857,44064,36951,861,56594,44863,861,21286,1314,76874,862,48863,88184,88520,863,89043,10760,75430,860,88416,38865,93302,855,94591,88520,51,64787,66775,827,87326,88520,65,17824,91543,75430,864,43015,28030,7,78763,91543,75430,860,94764,58582,860,87117,41276,88520,861,77333,91543,97196,862,67604,56594,50359,863,87326,91543,88838,856,67559,36951,42,64787,75430,827,43015,27705,45,77333,74443,53520,864,43015,58620,7,42870,1314,55502,863,41276,78429,863,17824,94764,48564,860,17824,38865,55672,861,89447,10760,57485,862,60664,31581,50359,857,45936,48564,45,47706,63915,827,89447,78429,68,72613,33245,28548,864,72613,8029,7,80220,1729,81640,862,46989,63915,862,67559,88285,88838,863,51073,44064,62250,860,343,64787,3410,861,1101,28602,54324,858,45936,70729,48,99240,57485,827,88416,88838,70,94197,1729,78686,864,54271,33009,7,13141,39485,27705,861,75425,15183,861,54271,16411,70729,862,89043,67057,28548,863,88416,10760,3410,860,27645,44064,55672,859,45936,44863,51,94764,66775,827,343,63915,47,19676,33245,88838,864,19676,27229,7,51541,31581,81640,860,33245,55672,860,90910,44064,44863,861,68107,35304,53520,862,48863,96905,27705,863,72613,96905,3410,845,14453,62250,55,47706,75430,827,3823,55672,96,94197,99240,88838,865,45936,89716,7,13141,1314,75430,863,56594,76874,863,343,39485,55672,860,17824,91543,48564,861,51073,38865,81640,862,89043,94764,28548,850,67604,63915,62,75425,55502,827,60664,97196,48,67604,47706,50359,865,19676,65146,7,46431,41276,93302,862,1729,76874,862,67559,99240,78686,863,87326,44064,97196,860,87326,16411,93302,861,343,26815,15183,855,60
664,62250,70,10760,70729,827,27645,88520,77,87326,41276,44863,865,48863,28030,7,13141,16411,93302,861,79904,57485,861,90910,99240,63915,862,45936,75425,28548,863,17824,74443,28548,860,54271,94764,78429,844,89447,93302,44,67057,66775,827,54271,93007,866,20489,91543,23895,865,54271,32878,7,13141,1729,44863,860,26815,78429,860,72613,41276,78686,861,60664,99240,50359,862,19676,88184,58582,863,87326,67057,23895,849,1101,50359,55,88285,55672,827,48863,66775,44,67604,14685,54324,865,60664,4349,7,23997,41276,81640,863,10760,36951,863,77333,67057,50359,860,90910,79904,93007,861,48863,26815,55672,862,87117,33245,63915,854,20489,93007,62,91543,62250,827,48863,48564,94,43015,41276,62250,865,45936,58620,7,5191,67057,76874,862,31581,54324,862,88416,44064,28548,863,20489,57190,88520,860,18442,57190,57485,861,1101,64787,62250,859,20489,3410,70,14685,57485,827,14453,15183,51,17824,94764,93302,865,87326,20807,7,23997,31581,78686,861,1314,36951,861,54271,44064,55672,862,51073,1729,70729,863,94591,44064,97196,860,19676,88184,53520,848,14453,48564,44,79904,88520,827,19676,75430,75,21286,57190,44863,865,87326,8029,7,20586,16411,78429,860,31581,36951,860,18442,99240,15183,861,94591,39485,36951,862,94197,41276,93007,863,21286,46989,88520,853,87117,88520,55,91543,58582,827,72613,36951,131,94591,67057,48564,865,94591,57335,7,22381,28602,76874,863,79904,15183,863,94197,99240,50359,860,43015,57190,54324,861,67604,46989,54324,862,78938,44064,78686,858,343,93007,62,39485,93302,827,94197,58582,41,48863,88184,63915,865,27645,85708,7,66902,75425,44863,862,41276,78686,862,27645,1729,55502,863,54271,88285,88520,860,78938,10760,28548,861,1101,14685,3410,847,67604,76874,70,39485,28548,827,343,55502,111,67559,28602,55502,865,94591,33310,7,84250,28602,70729,861,1729,28548,861,94591,96905,70729,862,19676,31581,78686,863,94197,56594,66775,860,87117,1729,88520,852,87117,53520,44,39485,48564,827,67604,44863,73,14453,74443,15183,865,87117,9595,7,74411,30554,78686,860,31581,88838,860,51073,99240,97196,861,8711
7,57190,3410,862,51073,31581,50359,863,87117,30554,53520,857,3823,54324,55,88285,53520,827,94197,93302,92,72613,99240,66775,865,77333,22054,7,13141,79904,66775,863,47706,57485,863,94197,39485,78429,860,87117,57190,53520,861,94197,35304,28548,862,20489,14685,75430,846,27645,78429,62,33245,70729,827,51073,62250,867,89043,35304,93007,865,1101,8029,7,5191,94764,50359,862,10760,70729,862,17824,31581,55502,863,89447,67057,3410,860,14453,75425,78686,861,67604,28602,62250,851,67604,81640,70,96905,81640,827,67559,93302,590,78938,75425,78429,865,94591,73059,7,42429,30554,63915,861,39485,53520,861,51073,14685,78686,862,343,38865,81640,863,88416,57190,44863,860,17824,38865,15183,856,87117,88838,44,30554,57485,827,1101,93007,559,51073,41276,53520,865,17824,16079,7,23997,35304,23895,860,88184,76874,860,67604,91543,78429,861,51073,79904,36951,862,77333,41276,50359,863,1101,31581,28548,849,94197,55672,53,47706,48564,827,43015,3410,29,3823,79904,15183,868,89043,64384,7,13141,44064,58582,863,26815,78429,863,89447,35304,3410,860,78938,26815,55672,861,89447,88285,93007,862,45936,10760,55502,852,90910,93007,65,16411,27705,827,3823,55502,869,54271,10760,93007,868,21286,76039,7,41423,57190,93007,862,26815,66775,862,90910,96905,78686,863,14453,35304,23895,860,94591,94764,27705,861,77333,88285,53520,855,43015,54324,96,14685,27705,827,87326,3410,870,54271,10760,63915,868,67604,70959,7,46431,88184,66775,861,88184,66775,861,18442,26815,27705,862,87326,99240,53520,863,27645,96905,36951,860,27645,16411,15183,858,94197,63915,75,41276,88838,827,17824,76874,871,45936,99240,78429,868,89043,16079,7,15881,16411,55502,860,57190,93302,860,20489,46989,36951,861,72613,94764,63915,862,67604,64787,76874,863,48863,16411,19347,845,48863,78429,53,46989,62250,827,72613,57485,872,89043,47706,76874,868,343,90670,7,74411,96905,88520,863,91543,93302,863,27645,10760,76874,860,89043,38865,50359,861,77333,57190,97196,862,19676,67057,93302,848,78938,55502,65,10760,15183,827,89447,70729,873,1101,10760,63915,868,14453,32
878,7,13606,26815,48564,862,57190,62250,862,68107,88184,48564,863,14453,1314,15183,860,14453,56594,53520,861,48863,38865,78686,851,20489,53520,96,88285,44863,827,54271,50359,874,27645,64787,15183,868,60664,6248,7,46431,74443,75430,861,91543,3410,861,21286,46989,36951,862,14453,47706,55672,863,67559,88184,97196,860,72613,75425,78686,854,89447,93007,75,94764,88520,827,89043,3410,875,90910,28602,55672,868,67604,9595,7,42429,99240,3410,860,1729,48564,860,17824,41276,62250,861,88416,28602,44863,862,72613,74443,48564,863,68107,79904,36951,857,54271,88838,53,41276,58582,827,27645,81640,876,87326,64787,78429,868,87326,7744,7,71057,75425,63915,863,31581,36951,863,68107,38865,97196,860,54271,14685,70729,861,89447,96905,78429,862,19676,99240,19347,844,89447,78686,65,38865,48564,827,27645,3410,877,1101,39485,19347,868,67559,22054,7,57221,41276,97196,862,56594,66775,862,67559,31581,63915,863,343,1314,54324,860,60664,14685,3410,861,94591,64787,54324,847,94591,93302,96,1729,55672,827,94197,50359,878,90910,41276,55672,868,68107,17274,7,89033,39485,54324,861,1314,62250,861,77333,38865,50359,862,19676,44064,93302,863,77333,41276,50359,860,51073,31581,57485,850,89447,93302,75,64787,27705,827,3823,88520,879,18442,16411,44863,868,48863,22054,7,78763,57190,44863,860,64787,50359,860,94591,14685,54324,861,89043,75425,70729,862,60664,10760,70729,863,88416,88184,23895,853,88416,3410,53,16411,50359,827,72613,70729,880,54271,74443,88838,868,89043,33310,7,42429,75425,76874,863,14685,78429,863,94197,35304,28548,860,94591,33245,66775,861,54271,28602,78686,862,68107,10760,19347,856,20489,62250,65,38865,44863,827,54271,93302,881,19676,33245,70729,868,94197,8600,7,23997,88184,93302,862,33245,58582,862,78938,46989,78686,863,68107,99240,63915,860,48863,94764,62250,861,94197,33245,93007,859,45936,88520,96,96905,48564,827,343,54324,882,45936,99240,44863,868,87326,64384,7,42429,56594,55502,861,14685,76874,861,77333,38865,76874,862,3823,79904,27705,863,94197,16411,27705,860,48863,28602,55502,846,19676,485
64,75,64787,78686,827,1101,93007,883,343,47706,88838,868,54271,32878,7,78763,96905,78686,860,39485,36951,860,3823,67057,78429,861,45936,31581,53520,862,3823,35304,78686,863,45936,1729,23895,844,68107,27705,57,30554,50359,827,89043,58582,884,67604,1729,54324,885,87117,73059,7,5191,33245,27705,863,91543,97196,863,54271,10760,88520,860,77333,44064,88520,861,17824,33245,55502,862,89043,26815,58582,851,19676,76874,50,30554,19347,827,21286,93302,886,94591,28602,55502,885,72613,70959,7,84250,67057,66775,862,39485,54324,862,14453,38865,36951,863,21286,44064,36951,860,88416,88184,81640,861,72613,31581,81640,858,18442,19347,47,79904,55672,827,67559,27705,887,343,16411,62250,885,67559,76039,7,57221,31581,78429,861,14685,57485,861,3823,56594,75430,862,3823,75425,78429,863,27645,39485,55502,860,87326,31581,58582,849,87117,15183,94,28602,19347,827,78938,63915,888,88416,47706,19347,885,67604,4349,7,13141,39485,53520,860,30554,66775,860,89043,1314,3410,861,67559,56594,50359,862,94591,1314,28548,863,77333,44064,28548,856,19676,76874,57,46989,53520,827,89447,97196,889,87117,57190,57485,885,19676,28030,7,80220,67057,76874,863,47706,48564,863,90910,38865,55502,860,54271,88184,76874,861,1101,33245,55502,862,343,31581,28548,847,67559,54324,50,41276,19347,827,18442,93007,890,77333,16411,28548,885,14453,85708,7,12453,91543,66775,862,88184,62250,862,94591,35304,44863,863,94197,26815,75430,860,67559,94764,78429,861,14453,16411,70729,854,343,75430,47,64787,36951,827,78938,93007,891,43015,79904,88520,885,72613,87322,7,66902,79904,23895,861,26815,55502,861,17824,57190,78686,862,67604,75425,54324,863,94591,14685,76874,860,19676,41276,75430,845,14453,75430,94,75425,97196,827,14453,57485,151,87326,94764,76874,885,77333,8600,7,51541,94764,3410,860,14685,53520,860,21286,16411,50359,861,89043,94764,93302,862,67559,38865,78686,863,45936,14685,78686,852,3823,55502,57,88184,75430,827,94197,58582,892,90910,64787,3410,885,45936,89716,7,52926,31581,78686,863,74443,88520,863,94197,96905,63915,860,3823,96905
,36951,861,89447,99240,62250,862,343,31581,55672,859,87326,78686,50,35304,36951,827,21286,88838,893,18442,67057,88520,885,54271,8600,7,75672,30554,54324,862,91543,15183,862,94591,26815,63915,863,89043,88184,27705,860,3823,47706,54324,861,343,1729,70729,850,94197,48564,47,96905,27705,827,77333,55502,894,90910,28602,62250,885,3823,20807,7,23997,57190,57485,861,88285,55502,861,21286,74443,70729,862,68107,35304,19347,863,87117,33245,55672,860,89447,88285,3410,857,27645,28548,94,64787,97196,827,89447,76874,895,89447,56594,44863,885,18442,27229,7,66902,35304,62250,860,74443,76874,860,27645,41276,70729,861,68107,88184,15183,862,14453,88285,27705,863,68107,14685,75430,848,87117,55672,57,38865,78429,827,18442,70729,896,94197,74443,27705,885,343,7744,7,12453,64787,28548,863,31581,53520,863,72613,16411,78429,860,88416,26815,81640,861,94197,88184,19347,862,48863,88285,54324,855,72613,50359,50,67057,36951,827,51073,55502,897,27645,26815,93007,885,88416,76039,7,71057,47706,54324,862,26815,3410,862,87117,96905,36951,863,27645,46989,58582,860,94591,91543,54324,861,3823,88184,55502,846,67604,66775,47,91543,66775,827,89043,70729,898,90910,14685,76874,885,19676,32878,7,12453,1729,66775,861,57190,15183,861,68107,44064,53520,862,68107,46989,3410,863,343,14685,44863,860,89447,1729,19347,853,54271,54324,94,47706,88838,827,45936,70729,558,343,31581,78686,885,14453,50136,7,27777,94764,93007,843,60664,58582,32,27705,32,38865,55672,860,89043,1729,55502,843,1101,78686,32,19676,54331,24972,75672,94764,57485,843,19676,62250,34,54324,32,57190,57485,861,43015,99240,23895,843,78938,76874,34,17824,11855,81394,71057,57190,54324,843,94197,76874,36,27705,32,94764,63915,862,60664,1314,3410,843,87326,76874,36,45936,82493,29915,52926,91543,70729,843,51073,19347,38,27705,32,88285,53520,863,68107,39485,81640,843,88416,50359,38,60664,14353,71766,74411]},"awcbb_yhh_fun89":{"variablePool":{"c_f_317":null,"c_f_318":null,"c_f_319":null,"c_f_320":null,"c_f_321":null,"c_f_322":null,"c_f_323":null,"c_f_324":null,"c
_f_325":null,"c_f_326":null},"zhili":[42801,58382,31238,55672,719,87117,26815,81331,900,41276,88520,900,88416,97196,632,19676,46989,43178,901,88838,60,43847,88520,720,67559,59696,56594,88820,902,53520,60,75425,28548,900,94197,27705,633,77333,71865,41276,83428,903,75425,70729,901,343,3410,55,30554,53520,903,88416,52731,31581,97196,901,94591,62250,55,88285,66775,903,48863,1194,87326,78686,29,1729,28548,903,43015,54718,28548,131,59533,57485,150,54996,70675,19293,93007,666,57190,62250,902,27645,72863,30554,36951,821,88416,88838,904,89447,33009,1,33245,40830,905,44064,78686,902,77333,1314,31786,906,26815,53520,901,51073,53520,47,63915,53,19347,62,54324,130,56594,44863,903,54271,71340,76468,18385,70111,28548,841,62250,60,99240,62250,905,19676,25964,93007,131,88184,88838,905,3823,4728,98823,18508,93302,842,93007,131,79904,44863,905,94591,25960,76874,60,46989,88838,905,51073,4753,36713,32367,16370,5932,41276,57485,901,89447,36951,70,15183,53,54324,62,66775,130,96905,66775,903,343,82493,72753,60667,70302,93007,841,78686,60,96905,88838,906,54271,39406,75430,131,74443,97196,906,72613,91558,36561,55281,66775,842,23895,131,26815,53520,906,87326,52731,50359,60,67057,36951,906,89043,54996,75712,31572,41325,80220,10760,81640,900,94197,3410,633,36951,53,28548,34,94764,97196,901,20489,53520,137,21286,82493,27615,13141,88695,88838,726,51073,8600,0,43847,44863,251,18442,79904,4098,907,46989,54324,907,77333,53520,632,78938,38865,50671,908,63915,32,31581,50927,909,63915,53,31581,78686,909,1101,45692,11798,71,47706,58582,908,94591,96905,55672,909,343,3823,38865,11000,910,96905,19347,908,19676,88285,57485,909,68107,88838,841,78429,60,88285,44863,910,87117,14809,63915,131,31581,48564,910,77333,27029,26326,2147,55672,842,97196,131,79904,50359,910,51073,84586,70729,60,1729,57485,910,87326,45227,5972,31572,45256,15881,96905,44863,909,55502,34,41276,88838,909,60664,14353,80829,83833,-80,38865,54324,907,21286,54628]},"awcbb_yhh_fun90":{"variablePool":{"c_f_327":null},"zhili":[6310,82018,7020,477
06,27705,825,51073,50359,625,68107,76874,305,27645,8600,1,26815,65652,912,56594,19347,912,51073,62250,251,25887,70729,251,67604,53520,625,19676,27229,0,5932,35304,3410,912,89043,87161]},"awcbb_yhh_fun91":{"variablePool":{"c_f_329":null,"c_f_330":null,"c_f_331":null,"c_f_332":null,"c_f_333":null,"c_f_334":null,"c_f_335":null,"c_f_336":null},"zhili":[4852,66775,915,15183,916,54324,917,88838,918,78686,919,3410,920,54324,921,82018,56594,50359,921,72613,88184,15183,919,3823,31581,57485,918,19676,64787,48564,916,89043,85278,46482,30554,19347,917,68107,94764,3410,916,27645,88559,67834,99240,62250,915,87117,38224,5505,94841,39485,65652,922,47706,48564,916,87117,96905,3410,920,3823,44863,29,1492,33245,23895,922,19676,42991,33245,97196,920,67604,88285,76874,922,94197,27691,36561,54097,74047]},"awcbb_yhh_fun92":{"variablePool":{"c_f_338":null,"c_f_339":null,"c_f_340":null,"c_f_341":null,"c_f_342":null,"c_f_343":null,"c_f_344":null,"c_f_345":null},"zhili":[96327,97196,923,48564,924,15183,925,23895,926,28548,927,44863,928,54324,929,97348,14685,50359,929,17824,91543,88838,927,48863,94764,53520,926,343,70297,16411,50359,925,48863,14515,16411,81640,926,67559,14685,63915,924,17824,18508,16370,1314,88838,923,78938,38224,70302,49904,88184,49575,930,38865,19347,924,78938,28602,53520,928,87326,27705,29,99623,1314,63915,930,51073,22385,94764,53520,928,72613,47706,54324,930,20489,16751,87519,5505,12237]},"awcbb_yhh_fun93":{"variablePool":{"c_f_347":null,"c_f_348":null,"c_f_349":null,"c_f_350":null,"c_f_351":null,"c_f_352":null,"c_f_353":null,"c_f_354":null},"zhili":[28319,50359,931,44863,932,93007,933,88838,934,55672,935,3410,936,55672,937,85598,26815,57485,937,88416,35304,93302,935,89447,1314,15183,934,17824,41276,88520,933,45936,91543,53520,932,94591,28741,77828,96905,23895,931,78938,93006,38765,94841,75425,88820,938,44064,55502,932,67604,28602,36951,936,45936,93302,29,24173,44064,3410,938,19676,86028,10760,81640,936,88416,10760,3410,938,68107,27029,10124,82493,78598]},"awcbb_yhh_fun94"
:{"variablePool":{"c_f_356":null,"c_f_357":null,"c_f_358":null,"c_f_359":null,"c_f_360":null,"c_f_361":null,"c_f_362":null,"c_f_363":null},"zhili":[96327,36951,939,19347,940,36951,941,76874,942,28548,943,36951,944,93007,945,85598,10760,15183,945,19676,33245,70729,943,14453,33245,28548,942,14453,52164,47706,58582,940,68107,16370,44064,3410,941,94591,33406,39485,15183,939,19676,93006,71340,5505,28602,31786,946,46989,3410,940,67559,64787,66775,944,87326,44863,29,1492,14685,97196,946,87117,25960,39485,58582,944,1101,39485,78686,946,78938,3428,10124,70302,40332]},"awcbb_yhh_fun95":{"variablePool":{"c_f_364":null,"c_f_365":null,"c_f_366":null,"c_f_367":null,"c_f_368":null,"c_f_369":null,"c_f_370":null,"awcbb_yhh_fun96":"awcbb_yhh_fun96","awcbb_yhh_fun97":"awcbb_yhh_fun97","awcbb_yhh_fun98":"awcbb_yhh_fun98"},"zhili":[88520,960,48564,972,66775,977,47566,74131,35304,54324,598,45936,99240,57563,949,88184,54324,949,3823,97196,602,60664,41276,65652,950,33245,36951,950,89043,44863,604,90910,99240,11000,951,30554,75430,950,78938,36951,629,27645,99240,31786,952,14685,54324,949,21286,78429,776,45936,56594,49575,953,88184,53520,953,19676,93302,832,18442,41276,57563,954,88184,44863,953,87117,27705,955,96856,55502,747,42344,66775,956,50359,29,48564,150,15253,69395,36951,957,30554,78686,954,20489,1800,55502,958,36951,34,98487,33245,53520,951,27645,27705,605,67559,9595,1,99543,44863,610,88184,23895,960,43015,26647,81640,961,91543,27705,972,87117,53877,28602,57485,951,89043,23895,605,14453,8029,1,23997,33245,15183,953,45936,78429,955,51073,10760,4098,973,88184,55672,949,68107,97196,955,96905,93007,977,27645,66902]},"awcbb_yhh_fun96":{"variablePool":{"c_f_371":null},"zhili":[30657,78686,959,46773,95729,54324,747,99240,78686,959,54271,20167,27705,747,14453,58582,605,60664,90670,1,22381]},"awcbb_yhh_fun97":{"variablePool":{"c_f_372":null,"c_f_373":null,"c_f_374":null,"c_f_375":null,"c_f_376":null,"c_f_377":null,"c_f_378":null,"c_f_379":null,"c_f_380":null,"c_f_381":null},"zhili":[42801,574
85,962,58582,963,6103,20167,23895,747,3823,30554,61626,964,1314,66775,964,20489,50359,957,45936,66775,616,94591,7744,0,26815,31786,965,88285,15183,952,94591,57485,616,94197,76039,0,47706,38571,966,75425,70729,966,43015,93007,632,21286,31581,94617,967,75425,78429,964,51073,44863,956,54271,30554,11000,968,38865,19347,964,27645,53520,958,89447,99240,6432,969,10760,58582,968,68107,91543,78686,967,343,54324,137,94197,57446,40777,125,88184,48564,970,18442,28273,2,58949,13,31581,28548,970,89043,75425,28548,965,67604,93302,752,43015,65146,1,47706,62250,963,343,74443,93007,962,60664,88285,53520,965,72613,63915,752,77333,33009,1,78686,755,54271,85708,1,16411,83428,970,10760,58582,965,54271,57485,239,3823,27229,0,55502,34,26815,65652,971,1729,75430,969,43015,79904,27705,971,60664,92142,23295,39,96905,78686,970,91543,50359,970,88416,1729,57485,965,90910,54324,755,67559,85708,1,74411,41276,3410,965,60664,3410,239,88416,65146,0,57190,55502,971,66775,34,56594,78686,971,1101,70111,74158,58949,-50,56594,48564,970,19676,67057,93302,966,94197,66775,639,54271,32878,1,99573,-139,30554,88520,966,17824,28548,633,70729,53,47706,23895,968,67604,51053,57221,41276,44863,966,27645,54628]},"awcbb_yhh_fun98":{"variablePool":{"c_f_382":null,"c_f_383":null,"c_f_384":null},"zhili":[56812,19347,974,97196,975,62250,976,4458,30554,93007,974,94591,44064,78686,975,14453,88285,88838,976,20489,75425,78429,973,54271,28548,616,14453,85708,1,88520,961,87117,8029,2,71787]},"awcbb_yhh_fun99":{"variablePool":{"c_f_385":null,"c_f_386":null,"c_f_387":null,"c_f_388":null,"c_f_389":null,"c_f_390":null,"c_f_391":null,"c_f_392":null,"c_f_393":null,"c_f_394":null,"c_f_395":null,"c_f_396":null,"awcbb_yhh_fun100":"awcbb_yhh_fun100","awcbb_yhh_fun101":"awcbb_yhh_fun101","awcbb_yhh_fun102":"awcbb_yhh_fun102","awcbb_yhh_fun103":"awcbb_yhh_fun103","awcbb_yhh_fun104":"awcbb_yhh_fun104","awcbb_yhh_fun105":"awcbb_yhh_fun105","awcbb_yhh_fun106":"awcbb_yhh_fun106","c_f_416":null,"awcbb_yhh_fun111":"awcbb_yhh_fun111","c_f_418":nu
ll,"c_f_419":null,"awcbb_yhh_fun112":"awcbb_yhh_fun112","awcbb_yhh_fun113":"awcbb_yhh_fun113","awcbb_yhh_fun114":"awcbb_yhh_fun114","c_f_426":null,"awcbb_yhh_fun115":"awcbb_yhh_fun115","c_f_445":null,"c_f_446":null,"awcbb_yhh_fun119":"awcbb_yhh_fun119","awcbb_yhh_fun120":"awcbb_yhh_fun120","c_f_457":null,"awcbb_yhh_fun121":"awcbb_yhh_fun121","awcbb_yhh_fun122":"awcbb_yhh_fun122","awcbb_yhh_fun123":"awcbb_yhh_fun123","c_f_468":null,"awcbb_yhh_fun124":"awcbb_yhh_fun124","awcbb_yhh_fun125":"awcbb_yhh_fun125","c_f_471":null,"c_f_472":null,"awcbb_yhh_fun126":"awcbb_yhh_fun126","awcbb_yhh_fun127":"awcbb_yhh_fun127","c_f_482":null,"awcbb_yhh_fun128":"awcbb_yhh_fun128","awcbb_yhh_fun129":"awcbb_yhh_fun129","awcbb_yhh_fun130":"awcbb_yhh_fun130","c_f_497":null,"c_f_498":null,"awcbb_yhh_fun131":"awcbb_yhh_fun131","c_f_505":null,"awcbb_yhh_fun132":"awcbb_yhh_fun132","awcbb_yhh_fun133":"awcbb_yhh_fun133"},"zhili":[28548,995,78429,1000,63915,1006,15183,1007,3410,1010,55672,1013,48564,1031,28548,1036,58582,1044,57485,1048,78686,1053,78686,1079,48564,1092,63915,1096,97196,1107,88520,1110,27705,1113,3410,1117,66775,1120,27705,1133,76874,1138,57485,1150,53520,1157,78686,1160,3410,1170,70729,1179,76874,1186,67122,66775,980,11653,1729,76874,598,54271,31581,81331,981,39485,76874,981,45936,50359,602,18442,96905,34739,982,16411,28548,982,90910,48564,604,77333,88285,50671,983,31581,62250,982,68107,27705,629,27645,30554,49575,984,41276,78686,982,19676,50359,718,17824,30554,80153,985,75425,76874,981,67604,19347,677,94591,10760,83428,986,30554,19347,986,54271,55502,707,1101,14685,11000,987,10760,93302,986,54271,55672,782,45936,14685,61626,988,1314,88838,981,89043,55502,776,20489,57190,49575,989,94764,75430,989,87326,53520,955,94591,33245,50671,990,39485,88838,982,88416,62250,979,96856,62250,747,99240,55672,983,89447,76874,605,78938,85708,0,42433,27705,991,96905,57485,995,19676,37744,3410,996,10760,66775,1000,18442,80217,3410,610,39485,55502,1006,87326,85110,53520,239,79904,66775,1007,67559,83
964,3410,1008,46989,78686,1010,90910,78655,19347,755,47706,54324,1013,94591,99543,36951,956,58582,29,55672,150,70953,26647,3410,1014,55502,29,54324,150,72863,26647,88838,994,57485,34,32512,44863,999,23895,36,42433,62250,761,41276,50359,1031,87117,7744,0,920,30554,97196,985,94591,50359,605,94197,60919,1,74411,41276,53520,982,343,88520,979,48863,31581,81076,1032,75425,48564,982,67604,76874,1033,11951,78429,757,1729,88520,1036,343,83964,57485,731,48564,34,84099,47706,53520,1032,88416,23895,605,88416,60919,1,80220,14685,88520,982,43015,15183,1033,48863,39485,28718,1037,33245,81640,981,67559,28548,1038,83837,48910,74443,78429,981,94197,55502,1038,18442,79904,40830,1039,33245,48564,982,94591,54324,1040,23627,97196,991,64787,15183,1044,20489,80217,19347,996,94764,78429,1048,90910,81533,88838,610,67057,15183,1053,43015,99543,46989,81640,983,54271,62250,605,48863,57335,1,71057,28602,36951,982,89043,78429,1040,94591,39485,40830,1054,75425,63915,1039,60664,97196,1055,91543,70729,1079,94591,60919,0,74411,16411,50359,1039,27645,53520,1055,87326,67057,50671,1080,26815,53520,981,67559,78686,1081,43953,5191,96905,28548,981,51073,55672,1081,68107,74443,81076,1082,28602,3410,1082,60664,53520,1083,4762,58582,1081,16411,62250,1092,51073,26647,63915,1093,88184,88520,1096,1101,37744,57221,99240,36951,1082,21286,81640,1083,67559,88184,40830,1097,57190,81640,982,45936,97196,1098,31617,55672,747,66001,93007,1038,79904,78686,1080,77333,81533,76874,1099,96905,3410,1097,19676,69395,1729,55672,1032,19676,23895,747,78938,93007,605,17824,7744,1,32954,48564,239,57190,97196,1107,14453,83964,78429,739,26815,55502,1110,18442,5449,54324,757,64787,55672,1113,1101,81533,93302,731,63915,29,78686,150,13851,32512,91543,27705,1032,1101,55672,605,87117,28030,1,12453,16411,28548,982,14453,19347,1098,14453,94764,49575,1114,79904,36951,982,20489,27705,1115,49359,62250,610,16411,88520,1117,14453,31051,15183,155,56594,81640,1120,94197,44215,41276,97196,983,45936,36951,605,45936,70959,1,23997,75425,76874,982,67604
,58582,1115,20489,99240,94617,1121,91543,93007,981,20489,75430,1122,42621,75672,64787,88838,981,77333,48564,1122,68107,16411,40830,1123,56594,50359,1123,45936,23895,1124,47792,57485,637,47706,63915,1133,51073,5449,81640,687,99240,44863,1138,3823,31051,27777,31581,66775,1123,89447,78429,1124,78938,39485,65652,1139,28602,76874,982,21286,58582,1140,52987,66775,747,42344,3410,1122,26815,55502,1139,88416,26647,79904,78429,983,19676,93007,605,51073,58620,1,38023,50359,1019,74443,3410,1150,94591,32954,27705,1025,14685,50359,1157,20489,81533,93302,1155,57190,78429,1160,89043,99543,30554,88520,983,20489,53520,605,21286,28030,1,48373,1314,53520,982,3823,55672,1140,60664,74443,49575,1016,35304,88520,981,67559,3410,1161,45071,27777,41276,19347,981,94197,57485,1161,78938,75425,34739,1162,16411,81640,1162,17824,93302,1124,47792,28548,1163,47706,76874,1170,14453,26647,1857,28602,48564,1162,17824,15183,1124,17824,96905,72245,1171,16411,55502,982,90910,62250,1172,42344,88838,747,57396,70729,1161,33245,81640,1171,343,99543,74443,3410,1016,14453,58582,747,20489,93007,605,48863,76039,1,78655,28548,1019,31581,55672,1179,78938,43833,50359,1025,47706,66775,1186,19676,32954,67057,93007,1016,27645,57485,605,89447,73059,1,48373,96905,55672,982,67559,62250,1172,88416,1729,65652,1017]},"awcbb_yhh_fun100":{"variablePool":{"c_f_397":null,"c_f_398":null},"zhili":[85032,53520,992,44863,993,46773,48012,44863,994,78938,96905,28548,992,94591,67057,93007,993,88416,19046,88520,616,89447,17274,3,87161]},"awcbb_yhh_fun101":{"variablePool":{"c_f_399":null,"c_f_400":null},"zhili":[40277,66775,997,75430,998,40793,46362,75430,999,94197,91543,97196,997,89447,57190,44863,998,87117,62423,23895,616,3823,16079,3,19327]},"awcbb_yhh_fun102":{"variablePool":{"c_f_401":null,"c_f_402":null,"c_f_403":null},"zhili":[55069,27705,1001,23895,1002,28548,1003,50626,19046,48564,747,14685,66775,1003,18442,88695,78686,747,94591,58582,605,27645,8600,1,84250,43847,78429,1004,47706,78686,1001,94591,42870,7020,44863,1005,26815,6391
5,1002,27645,12453,29904,88520,239,48863,5044,0]},"awcbb_yhh_fun103":{"variablePool":{},"zhili":[49364,4458,46362,88184,66775,985,67559,48564,239,67559,57485,305,48863,60919,1,2580,3410,750,89447,76039,0]},"awcbb_yhh_fun104":{"variablePool":{"c_f_404":null},"zhili":[56812,53520,1009,25186,14685,62250,1009,18442,43847,27705,722,72613,7744,1,55210,28548,726,54271,2112,0,73591]},"awcbb_yhh_fun105":{"variablePool":{"c_f_405":null,"c_f_406":null},"zhili":[74066,28548,1011,6103,96905,50359,1011,87326,8500,2,11949,10,91543,55502,1011,18442,965,50359,722,94197,87322,1,46362,15183,757,17824,87322,0,26815,81331,1012,64787,53520,1012,88416,70202]},"awcbb_yhh_fun106":{"variablePool":{"c_f_407":"awcbb_yhh_fun107","awcbb_yhh_fun108":"awcbb_yhh_fun108"},"zhili":[15183,1023,93302,1030,30906,11653,28602,66775,1030,88416,48113]},"awcbb_yhh_fun107":{"variablePool":{"c_f_408":null},"zhili":[87472,93302,1015,14126,19347,724,35304,88838,1015,72613,86140,33717,97012,7,10760,15183,1016,68107,36227,35343,5,67057,66775,1017,94591,74047]},"awcbb_yhh_fun108":{"variablePool":{"c_f_409":null,"awcbb_yhh_fun109":"awcbb_yhh_fun109","awcbb_yhh_fun110":"awcbb_yhh_fun110"},"zhili":[19347,1024,19347,1029,28319,44863,1018,14126,47792,62250,1019,38865,50359,1024,54271,80217,66775,1025,47706,36951,1029,18442,90204,48583]},"awcbb_yhh_fun109":{"variablePool":{"c_f_410":null,"c_f_411":null,"c_f_412":null},"zhili":[83923,50359,1020,70729,1021,53520,1022,1828,64787,93007,1018,78938,16411,93007,1020,94197,39485,78429,1021,17824,41276,19347,1022,94197,47706,78429,1021,20489,41276,62250,1023,89447,8600,1,15183,1019,67604,65146,4,13256]},"awcbb_yhh_fun110":{"variablePool":{"c_f_413":null,"c_f_414":null,"c_f_415":null},"zhili":[42801,48564,1026,97196,1027,3410,1028,40217,14685,50359,1018,1101,46989,88520,1026,68107,14685,81640,1027,94591,30554,93302,1028,1101,75425,55502,1027,343,96905,55502,1023,21286,33009,1,75430,1025,77333,90670,4,44119]},"awcbb_yhh_fun111":{"variablePool":{"c_f_417":null},"zhili":[55069,14126,
44863,1034,7701,4117,7020,97196,726,60664,33310,1,14685,46696,1035,26815,97196,1035,48863,41090]},"awcbb_yhh_fun112":{"variablePool":{"c_f_420":null,"c_f_421":null},"zhili":[6310,53520,1041,27705,1042,1828,31581,28548,1041,60664,39485,81640,1042,18442,46362,55502,1043,90910,58582,616,94197,70959,2,41090]},"awcbb_yhh_fun113":{"variablePool":{"c_f_422":null,"c_f_423":null},"zhili":[85032,23895,1045,54324,1046,4458,94764,27705,1045,88416,64787,88520,1046,87117,17441,3410,1047,88416,57485,616,21286,32878,2,12452]},"awcbb_yhh_fun114":{"variablePool":{"c_f_424":null,"c_f_425":null},"zhili":[85496,57485,1049,81640,1050,25186,49831,63915,1051,74443,78686,1049,3823,5932,43847,97196,1052,31581,50359,1050,68107,1857]},"awcbb_yhh_fun115":{"variablePool":{"c_f_427":null,"awcbb_yhh_fun116":"awcbb_yhh_fun116","awcbb_yhh_fun117":"awcbb_yhh_fun117","c_f_437":"awcbb_yhh_fun118"},"zhili":[54324,1065,15183,1072,3410,1062,38452,51122,39485,53520,1054,1101,54324,605,54271,16079,0,31581,6432,1056,94764,63915,1056,87117,78686,1043,43953,66775,1057,26815,3410,1065,18442,920,14685,53520,1056,89043,50359,605,67559,17274,1,13606,26815,81640,1056,78938,66775,1047,52508,50359,1057,88285,93007,1072,67604,43833,57190,15183,1056,94197,66775,605,88416,33310,1,75672,79904,58582,1056,67604,36227]},"awcbb_yhh_fun116":{"variablePool":{"c_f_428":null,"c_f_429":null,"c_f_430":null,"c_f_431":null},"zhili":[40277,81640,1058,19347,1059,9455,43847,15183,1051,17824,1729,40830,1060,35304,93302,1060,90910,88838,731,67559,96905,15314,1061,95729,31581,70729,1058,89447,96905,66775,1059,68107,47706,75430,1061,27645,44064,88520,1062,3823,55502,305,60664,58620,4,88285,66775,1058,88416,88184,55672,1059,87117,30554,63915,1060,18442,44863,1063,45936,32878,2,88695,62250,1064,14685,19347,1059,94591,96905,44863,1061,43015,14685,58582,1059,89043,5505,88285,76874,1058,67604,44863,192,21286,70959,2,52926]},"awcbb_yhh_fun117":{"variablePool":{"c_f_432":null,"c_f_433":null,"c_f_434":null,"c_f_435":null,"c_f_436":null},"zhili":[4
2801,27705,1066,55672,1067,66522,95101,81640,1051,14453,16411,81076,1068,47706,44863,1068,20489,3410,731,60664,26815,88820,1069,39485,48564,1067,51073,16411,78429,1069,87326,26815,55672,1067,1101,63317,47706,75430,1066,78938,50359,192,14453,60919,2,75425,57563,1070,44064,66775,1066,88416,46989,54324,1067,343,64787,15183,1068,18442,88520,1071,89447,33310,2,49831,1314,88838,1066,78938,16411,19347,1067,19676,46989,27705,1069,21286,41276,58582,1062,94591,70729,305,77333,20807,4,2580,23895,1064,46989,88520,1070,87117,15881]},"awcbb_yhh_fun118":{"variablePool":{"c_f_438":null,"c_f_439":null,"c_f_440":null,"c_f_441":null,"c_f_443":null,"c_f_444":null},"zhili":[6310,55502,1073,70729,1074,55672,1075,77916,7020,66775,1052,343,26815,15314,1076,31581,50359,1076,78938,8500,9,96137,50359,1064,87326,88184,60609,1077,95293,15,56594,58582,1076,67604,56594,80153,1077,84563,97196,1052,46989,62250,980,88416,42870,48564,32,39485,81076,1078,94764,78686,1075,19676,46989,23895,1078,51073,40257,68909,51,56594,27705,1073,1101,1729,93302,1078,43015,56594,23895,1074,67604,54097,33245,75430,1073,51073,47706,63915,1078,67559,47706,54324,1074,67559,54097,43015,31581,55502,1077,1101,96905,63915,1078,48863,18442,57338,74753,33245,54324,1078,88520,34,10760,27705,1078,89447,38765,73914,27026,-62]},"awcbb_yhh_fun119":{"variablePool":{"c_f_447":null,"c_f_448":null,"c_f_449":null,"c_f_450":null,"c_f_451":null,"c_f_452":null,"c_f_453":null,"c_f_454":null},"zhili":[96327,58582,1084,54324,1085,63158,63915,53,1314,50359,1085,343,28669,79904,34739,1086,31581,50359,1086,19676,44064,55672,1084,1101,36951,633,21286,75992,75425,15183,1086,72613,92472,39485,50927,1087,35304,28548,1087,67604,50359,60,47706,19347,1087,1101,3428,70729,96,46989,23895,1087,94591,32351,76874,131,88184,50359,1087,51073,3428,71255,75712,70675,75425,49575,1088,22505,47706,4098,1089,62250,32,33245,57563,1090,1314,3410,1087,20489,16411,97196,1090,90910,92142,7598,26,33245,54324,1088,89043,10760,19347,1089,89447,55502,238,88416,4349,1,30554,
15183,1090,70729,53,26815,78686,1090,27645,38765,77074,68307,-37,14685,23895,1089,343,88285,88838,1087,89447,30554,63915,984,18442,93302,616,27645,33310,2,38865,40830,1091,88285,36951,1091,89447,67057,48564,1084,20489,15183,639,18442,90670,1]},"awcbb_yhh_fun120":{"variablePool":{"c_f_455":null,"c_f_456":null},"zhili":[83923,81640,1094,85598,78429,647,39485,97196,1094,51073,62250,632,94591,76874,36,70729,34,64787,88838,1094,43015,23895,633,60664,19349,42991,51073,41244,30554,60609,1095,28602,66775,1094,51073,88838,633,33245,27705,1094,54271,36951,633,17824,10760,53520,1095,67604,18850,26645]},"awcbb_yhh_fun121":{"variablePool":{"c_f_458":null,"c_f_459":null,"c_f_460":null,"c_f_462":null},"zhili":[47566,82018,48012,10760,50359,1032,21286,55672,239,77333,78429,305,20489,6248,1,9662,54324,747,68107,16411,43178,1100,67057,57485,1100,17824,36951,1101,87326,88184,81331,1102,99240,66775,1100,43015,93302,1038,43015,10760,81076,1103,7020,23895,994,67604,79989,54324,1004,45936,79675,11792,18,10760,63915,1103,20489,76874,996,68107,33245,38571,1104,43847,36951,735,81640,34,57221,35343,10,91543,97196,1103,18442,54324,991,94197,88285,50671,1104,20167,19347,1105,87117,36106,12,67057,93007,1104,87117,48012,28548,1105,94197,76874,1106,87117,91721,63605,44,62423,15183,1105,47706,97196,1103,14453,20167,47706,55502,1102,89043,14288,7,1314,97196,1102,88416,93302,632,60664,26815,66775,1104,14453,3410,305,45936,8029,3,42870,2264,23895,1105,51073,93007,1106,10760,58582,1104,60664,66902,7838,23,55210,26815,50359,1102,78938,10504,7,1314,15183,1102,18442,15183,632,77333,99595,88520,1105,88416,58582,610,18442,57335,2]},"awcbb_yhh_fun122":{"variablePool":{"c_f_463":null,"c_f_464":null},"zhili":[38452,36951,1108,50359,1109,50626,28602,66775,1108,20489,67057,55672,1109,43015,9662,78686,1105,18442,55502,1057,87117,16079,2]},"awcbb_yhh_fun123":{"variablePool":{"c_f_465":null,"c_f_467":null},"zhili":[74066,46773,86011,93007,747,14453,97196,1099,67559,26815,83428,1111,7020,15183,994,343,57870,93007,10
04,48863,50933,87551,28,19347,1034,45099,5272,46362,57485,726,3823,17274,1,64787,38571,1112,46989,66775,1112,21286,16411,19347,1111,19676,78429,1093,89043,73059,1,35343,30,2264,3410,719,20489,57870,28548,731,343,10760,62250,1111,48863,55672,1081,18442,8029,2,19347,1034,68622,22237,19046,97196,726,20489,22054,1,44064,15314,1112,31581,97196,1112,21286,88290]},"awcbb_yhh_fun124":{"variablePool":{"c_f_469":null},"zhili":[85496,23895,1116,77916,35304,50359,1116,54271,88069,97196,609,78938,20807,1]},"awcbb_yhh_fun125":{"variablePool":{"c_f_470":null},"zhili":[55069,44863,1118,77916,74993,79904,44863,1118,89447,19372,4,2580,15183,1119,90910,66775,637,68107,70959,1,78598]},"awcbb_yhh_fun126":{"variablePool":{"c_f_473":null,"c_f_474":null,"c_f_475":null,"c_f_477":null},"zhili":[40277,50359,1125,25186,74443,66775,1125,18442,70729,1126,343,46989,30842,1127,16411,75430,1125,21286,36951,1128,343,88285,81331,1129,99240,57485,1129,19676,10804,9,56594,75430,1127,45936,35304,1500,1130,87158,37,47706,88838,1127,27645,33245,97196,1129,88416,20093,36951,1131,5901,15183,1132,73695,10760,93007,984,45936,78686,616,1101,9595,1,88520,639,68107,17274,1,58582,639,87117,17274,1,16411,28718,1130,30554,62250,988,19676,14685,19347,1130,19676,15183,155,18442,89716,1,2694]},"awcbb_yhh_fun127":{"variablePool":{"c_f_478":null,"c_f_479":null,"c_f_480":null,"c_f_481":null},"zhili":[47566,23895,1134,1644,1729,76874,1134,72613,64787,88838,988,54271,78429,687,87117,64384,1,46989,31786,1135,31581,44863,1135,94591,93007,632,89447,91543,30842,1136,44863,1131,1729,50359,1136,27645,36951,32,94591,74035,61386,10,28548,1132,67057,50359,1136,87326,70729,34,67559,82147,39897,2,27026,55,23895,36,78686,53,1729,3410,1136,72613,58582,192,3823,17274,2,96905,28548,984,27645,50359,616,19676,64384,1,39485,46696,1137,97196,32,76874,53,75425,58582,1136,89043,53520,740,1101,2112,2,79904,36951,1135,17824,78429,633,10760,55502,1135,19676,63915,633,87326,23895,96,27369,59812,45071,19347,1126,44064,15183,1135,14453,44215,23895,1
128,14685,93302,1137,60664,64093,39485,55672,1121,78938,93302,616,94197,57335,1,71787]},"awcbb_yhh_fun128":{"variablePool":{"c_f_483":null,"c_f_484":null,"c_f_485":null,"c_f_486":null,"c_f_487":null,"c_f_488":null,"c_f_489":null},"zhili":[7580,88520,1141,15183,1142,19347,1143,78686,1144,1828,67057,88520,1144,64787,44863,1144,21286,7020,88520,747,89043,78429,605,87117,8029,1,23997,16411,55672,1143,45936,99240,88520,1144,94591,79904,48564,1141,17824,53520,991,14453,7744,2,38865,38571,1145,46989,63915,1142,89043,35304,88838,1145,78938,62250,755,90910,6248,1,67057,50927,1146,94764,78686,1145,51073,55672,747,68107,30554,28718,1147,91758,97196,1126,10760,88838,1146,60664,42433,23895,1148,47706,55502,1143,89043,53877,48564,1101,39485,27705,1147,51073,23895,1101,54271,53877,23895,1149,67057,70729,1141,67559,53877,28548,1038,91543,27705,1147,94197,81640,1038,17824,5449,55502,1099,39485,27705,1147,67604,58582,1099,19676,38023,28548,731,96905,78429,1141,54271,76874,731,67604,99543,70729,1119,96905,55672,1144,78938,88838,1122,94591,64093,44064,53520,1121,51073,53520,616,21286,5044,1,12237]},"awcbb_yhh_fun129":{"variablePool":{"c_f_490":null,"c_f_491":null,"c_f_492":null,"c_f_493":null,"c_f_494":null},"zhili":[87472,62250,1151,28548,1152,88838,1153,57485,1154,96868,75425,15183,1154,30554,81640,1154,77333,29904,27705,747,343,48564,605,51073,4349,1,48910,94764,78686,1152,10760,81640,1152,27645,75425,75430,1154,94197,44863,1122,89447,2264,66775,1155,14453,8600,2,80220,35304,93302,1152,67559,55672,1126,88416,14685,27705,1153,67559,46989,93302,1154,43015,38865,81640,1151,90910,97196,996,54271,73059,2,78429,755,68107,60919,1,46989,6432,1156,14685,28548,1156,87326,44119]},"awcbb_yhh_fun130":{"variablePool":{"c_f_495":null,"c_f_496":null},"zhili":[74066,62250,1158,70729,1159,40217,44863,724,44064,27705,1158,88416,79056,5889,99518,7,38865,78429,1158,77333,22218,46546,15,91543,19347,1158,45936,62423,38865,88838,1159,72613,88838,687,43015,58620,2,87161]},"awcbb_yhh_fun131":{"variablePool":
{"c_f_499":null,"c_f_500":null,"c_f_501":null,"c_f_502":null,"c_f_503":null,"c_f_504":null},"zhili":[40277,3410,1164,81640,1165,27705,1166,55672,1167,40217,26815,19347,1167,3823,68622,3264,2,14720,18,88184,57485,1167,78429,60,66775,130,16925,88285,81640,984,17824,75430,656,94591,73059,1,71057,94764,70729,1164,72613,10760,88838,1167,1101,31617,66775,956,41276,19347,1166,90910,47706,23895,1165,3823,35484,32512,47706,54324,990,1101,23895,616,20489,87322,1,81640,961,94197,33009,2,56594,83428,1168,64787,76874,1165,60664,67057,78429,1168,94591,36951,632,88416,78429,192,88416,58620,1,78429,53,88285,54324,1166,60664,51053,33245,28548,984,343,76874,616,94591,22054,2,26815,11000,1169,91543,36951,1168,87117,78429,633,81640,53,10760,76874,1165,78938,95,51541,31617,27705,1148,57190,28548,1168,45936,84099,93007,1101,31581,55672,1169,21286,15713,3410,1128,91543,36951,1167,27645,84099,88184,19347,1121,67559,93302,616,19676,65146,1,95833]},"awcbb_yhh_fun132":{"variablePool":{"c_f_506":null,"c_f_507":null,"c_f_508":null,"c_f_509":null,"c_f_510":null,"c_f_511":null},"zhili":[49364,81640,1173,75430,1174,62250,1175,81640,1176,46773,35304,23895,1176,39485,27705,1176,18442,99595,70729,747,17824,70729,605,17824,70959,1,89033,31581,63915,1175,88416,47706,93007,1173,43015,28548,956,94197,31581,23895,1173,18442,78429,1014,89447,94764,36951,1176,21286,78686,1161,94591,27705,1163,18442,33310,3,47706,81076,1177,10760,93007,1176,68107,70729,1101,88184,55502,1177,48863,78686,1101,343,20586,20167,1314,48564,1173,19676,88285,78429,1174,14453,31581,93302,1177,14453,93007,1148,87117,38865,3410,1176,21286,79904,88520,1016,89043,88838,1019,1101,55502,305,1101,89716,5,64787,81076,1178,88285,15183,1177,51073,56594,27705,1178,20489,76874,609,3823,60919,1,28602,63915,1178,90910,73591]},"awcbb_yhh_fun133":{"variablePool":{"c_f_512":null,"c_f_513":null,"c_f_514":null,"c_f_515":null,"c_f_516":null,"c_f_517":null},"zhili":[42801,66775,1180,53520,1181,63915,1182,23895,1183,51122,14685,57485,1183,47706,58582,1183
,54271,57870,93302,747,17824,53520,605,89447,70959,1,48910,39485,93007,1181,16411,15183,1181,27645,46989,70729,1183,67559,23895,1122,3823,95729,50359,1155,51073,6248,2,66902,57190,66775,1182,48863,39485,81640,1180,51073,70729,956,19676,26815,36951,1180,17824,78429,1014,90910,41276,62250,1181,27645,88838,1128,78938,1729,75430,1183,54271,97196,1161,27645,48564,1163,88416,27229,4,64787,80153,1184,57190,54324,1183,43015,48564,1101,46989,70729,1184,27645,75430,1101,72613,5191,17441,35304,76874,1180,94197,88285,88520,1181,51073,30554,78429,1184,17824,63915,1148,3823,26815,23895,1183,72613,75425,93007,1016,21286,50359,1025,89447,88520,305,68107,64384,5,57190,15314,1185,28602,78429,1185,67559,13256]},"awcbb_yhh_fun134":{"variablePool":{"c_f_519":null,"c_f_520":null,"c_f_521":null,"c_f_522":null,"c_f_523":null,"awcbb_yhh_fun135":"awcbb_yhh_fun135","awcbb_yhh_fun136":"awcbb_yhh_fun136","c_f_536":"awcbb_yhh_fun137","c_f_543":null,"awcbb_yhh_fun138":"awcbb_yhh_fun138"},"zhili":[55502,1208,55502,1212,88520,1211,55672,1223,56812,40217,88184,55502,598,19676,47706,34739,1190,26815,63915,1190,90910,76874,602,14453,38865,15314,1191,33245,44863,1191,343,28548,1033,68107,88184,88820,1192,41276,58582,1190,94591,23895,776,89043,35304,84035,1193,31581,55502,1193,72613,78429,1194,42344,55502,750,39485,70729,1208,19676,37744,88838,739,38865,44863,1212,87117,78655,27705,956,88838,29,36951,1201,72863,98487,28548,1014,55672,32,31051,46989,66775,1192,20489,53520,605,1101,65146,1,84250,16411,53520,1193,67559,27705,1194,94197,64787,4098,1213,74443,44863,1190,67604,66775,1194,88285,15183,1213,94197,10760,19347,1192,77333,15183,761,77333,17274,1,66902,91543,75430,1193,87326,55672,1220,96856,70729,747,92486,62250,1221,36951,560,15713,10760,54324,1213,45936,78686,747,89043,19347,605,78938,57335,1,81533,93302,750,35304,57485,1223,87326,90204,35304,81640,1213,20489,27705,605,20489,87322,1,46431,1729,44863,1193,68107,19347,1220,43015,94764,60609,1224,16411,36951,1190,14453,28548,1220,96905,28548,1224,87
326,57190,97196,1192,87117,88520,761,343,5044,1,71057]},"awcbb_yhh_fun135":{"variablePool":{"c_f_524":null,"c_f_525":null,"c_f_526":null,"c_f_527":null,"c_f_529":null,"c_f_530":null,"c_f_531":null,"c_f_532":null,"c_f_533":null},"zhili":[87472,9455,48012,76874,1005,17824,46989,88820,1195,57190,93302,1195,72613,78429,632,3823,1314,28718,1196,88285,88838,1195,68107,78686,633,87326,1729,83428,1197,19046,54324,1198,23000,23997,2264,36951,1198,88416,26815,60609,1199,78686,32,33245,34739,1200,53520,1201,91543,81640,1200,68107,74084,33222,26,67057,88838,1199,21286,56594,81640,1200,87326,14685,57485,1200,88416,48373,26815,44863,1200,15183,34,56594,44863,1200,343,38224,75453,79509,-35,15183,32,1314,5503,1200,50359,32,96905,94617,1202,58582,1201,91543,55672,1200,60664,22960,29878,126,99240,70729,1197,14453,56594,76874,1200,78938,41096,47706,5503,1203,78686,647,15183,60,93007,53,30554,48564,1203,48863,5292,95,48564,131,61584,1314,93302,1196,88416,81640,36,91543,66775,1203,21286,62360,77333,46748,38823,44064,81076,1204,44064,19347,1202,75430,1201,56594,28548,1204,89043,46989,78429,1199,60664,10760,66775,1200,48863,72613,88184,63915,1202,68107,70302,9359,55886,48910,38865,23895,1199,87117,88285,57485,1200,1101,20489,88184,38571,1205,75425,88838,1199,94591,94764,88838,1200,45936,41276,50359,1199,67559,26815,62250,1202,1101,20489,42870,16411,81640,1199,94591,74443,81640,1202,43015,31581,93007,1205,17824,52926,38865,53520,1200,23895,34,57190,23895,1200,60664,93063,80829,76624,-135,62423,97196,1206,95101,55502,1207,93007,32,46431,7020,58582,1207,18442,78763]},"awcbb_yhh_fun136":{"variablePool":{"c_f_534":null,"c_f_535":null},"zhili":[56812,88838,1209,93302,1210,66522,67057,81640,1209,68107,1314,55672,1210,21286,41276,54324,1209,94197,31581,75430,1210,1101,18442,19046,94764,81640,1211,43015,28548,305,60664,65146,1,44203,26645]},"awcbb_yhh_fun137":{"variablePool":{"c_f_537":null,"c_f_538":null,"c_f_539":null,"c_f_540":null,"c_f_541":null,"c_f_542":null},"zhili":[26266,58382,25887,55672
,1198,21286,88184,81331,1214,62423,55502,1206,78938,67057,38571,1215,88695,75430,1207,87326,38865,21502,1216,44863,32,75425,38571,1217,28548,32,47706,28718,1218,50359,53,26815,76874,1218,78938,9561,33222,138,16411,88838,1215,55672,1201,50359,34,64787,78686,1215,94591,34701,55698,4492,16411,93302,1216,62250,1201,47706,36951,1214,3823,79904,78686,1215,90910,89043,56594,53520,1216,60664,82493,71200,78763,94764,28548,1214,67604,75425,63915,1215,3823,48863,1729,72245,1219,16411,97196,1214,18442,1729,66775,1215,18442,31581,78686,1214,14453,38865,58582,1216,94197,343,51541,44064,28548,1214,89043,88184,54324,1216,88416,67057,78686,1219,87326,5932,33245,48564,1217,67057,62250,1217,87117,78686,60,88285,88838,1218,87117,64845,76874,131,35356,39485,93007,1214,343,53520,1201,47706,78686,1214,54271,74443,50359,1216,94197,60664,30554,50359,1214,90910,16411,57485,1215,45936,20489,17681,68712,3823,77661,55837,26645,91543,78429,1218,19347,34,88285,28548,1218,88416,49904,41598,46546,-147,62423,36951,1206,33245,28548,1215,78938,20586,74993,63915,1207,57190,19347,1216,14453,22381,46989,23895,1217,67559,95833]},"awcbb_yhh_fun138":{"variablePool":{"c_f_544":null},"zhili":[68986,96868,96137,39485,75430,1213,14453,55672,750,94591,58582,305,90910,22054,1,2264,57485,747,89447,62250,1221,89043,28602,57563,1222,44863,32,94764,70729,1222,78938,56022,7598,23,9662,94764,36951,1211,67604,88520,305,20489,73059,1,64787,53520,1222,88838,34,41276,55502,1222,51073,35356,70007,83833,-32]},"awcbb_yhh_fun139":{"variablePool":{"c_f_546":null,"c_f_547":null},"zhili":[67122,62250,1226,78429,1227,51122,75425,97196,1226,17824,75425,19347,1227,67604,1314,97196,598,87117,76874,1194,54271,3410,1019,87117,6248,2,44863,155,89043,17274,0,12452]},"awcbb_yhh_fun140":{"variablePool":{"c_f_549":null,"c_f_550":null},"zhili":[11824,48564,1228,19347,1229,51122,14685,23895,598,68107,48564,677,21286,54324,707,27645,44064,55502,1228,89043,26815,28548,1229,68107,1314,58582,598,68107,44863,1194,87117,62250,1025,1101,5044,2,66775
,155,89447,5044,1,10701]},"awcbb_yhh_fun141":{"variablePool":{"c_f_551":null,"c_f_552":null,"awcbb_yhh_fun142":"awcbb_yhh_fun142"},"zhili":[36951,1333,42801,44863,1230,58382,35304,36951,319,20489,10760,78429,1231,88416,78686,1232,43015,7744,1,31581,38571,1233,70729,1234,56594,36951,1333,21286,88184,36951,1230,43015,55502,1308,67559,33310,2]},"awcbb_yhh_fun142":{"variablePool":{"c_f_553":null,"c_f_554":null,"c_f_555":null,"c_f_556":null,"c_f_557":null,"c_f_558":null,"c_f_559":null,"c_f_560":null,"awcbb_yhh_fun143":"awcbb_yhh_fun143"},"zhili":[55672,1332,47566,50359,1235,47861,58582,32,96905,46696,1236,23895,32,38865,40830,1237,16411,70729,1233,43015,15183,137,54271,1729,58582,1237,20489,22960,89099,121,3410,154,64787,27705,1233,94591,31581,44863,1237,67604,19676,81640,1238,18442,6248,1,30554,11000,1239,78686,32,75425,84035,1240,78686,32,44064,83428,1241,39485,81640,1239,19676,23895,137,20489,33245,15183,1241,51073,40257,99910,38,38865,88838,1240,27705,32,1314,66775,1239,18442,30554,3410,1241,343,77333,97196,132,87117,64384,1,30554,62250,1240,89447,93006,46358,94764,55502,1241,63915,34,1729,55672,1241,89043,71104,75286,58949,-52,88838,1242,28602,3410,1239,14453,97196,137,18442,26815,54324,1240,18442,5715,92615,63365,2,46720,8,30554,62250,1236,63915,34,52926,14321,13,41276,54324,1237,19347,34,10760,93007,1237,48863,70111,97228,88832,-135,64787,97196,1235,90910,28548,1243,87117,6248,0,50359,1244,39485,28548,1230,45936,75430,1245,60664,2112,1,64787,61626,1246,55502,1247,64787,76874,1230,78938,58582,1245,94197,65146,1,33245,81331,1248,3410,1249,44064,76874,1230,87117,23895,1250,51073,57485,1251,88416,16079,1,90240,39897,113,28548,1252,23895,1249,57190,55502,1230,67559,75430,1250,89043,15183,1253,20489,22054,2,64787,53520,1248,43015,4762,15183,1254,93302,53,78655,16411,3410,1255,89043,48564,209,54271,87322,2,30554,36951,1230,72613,45071,57485,1256,97196,32,13534,3410,1257,70729,32,75055,75430,1258,75430,32,85110,44863,1259,55502,667,80217,70729,1260,27705,32,64093,78429,12
61,36951,32,31051,70729,1262,27705,111,60014,31051,74443,63915,1255,20489,19347,209,60664,8029,2,99240,93302,1246,94591,49359,93007,1254,3410,131,10558,32954,81640,1263,53520,57,79005,80217,64787,48564,1255,87117,3410,209,51073,70959,2,14720,302,3410,1252,75425,88520,1230,94591,15183,1250,67559,19347,1251,78938,28030,1,68302,97012,2,87158,283,41276,66775,319,94591,78429,1264,88285,58582,1265,78938,82409,0,70729,1266,14453,28030,0,1857,93302,34,99240,27705,1236,78938,21293,29987,10,74443,57485,319,89043,58582,362,18442,53520,1267,1101,43028,11,91543,44863,319,88416,93302,1268,53520,1269,75672,11949,25,1314,97196,319,78938,19347,1268,23895,44,36951,1270,33245,88520,319,18442,70729,362,18442,19347,1267,343,28030,0,64845,77651,22381,70729,1252,57190,93302,1230,43015,97196,1250,14453,53520,1271,17824,60919,1,33245,44863,1230,67559,79601,55672,1259,63915,34,85110,53520,1260,76874,34,38023,76874,1272,93007,1273,26647,76874,1274,54324,667,99543,35304,66775,1255,19676,75430,1275,51073,17274,2,99240,44863,1246,343,43953,44863,1254,66775,32,69395,78686,1272,28548,1276,38491,88520,1274,78429,1277,42433,26815,62250,1255,54271,55672,1275,45936,85708,2,14685,54324,1230,78938,37186,44863,1257,53520,55,60400,84099,88520,1258,93302,887,75055,63915,1272,36951,1278,1800,54324,1274,81640,1279,99543,79904,97196,1255,14453,78686,1275,60664,22054,2,33245,15183,1246,67559,13592,93007,1263,54324,32,32512,15183,1272,70729,1280,78655,58582,1274,88838,1281,53877,31581,66775,1255,87326,63915,1275,48863,9595,2,31581,19347,1230,67559,4762,54324,1261,57485,32,42433,66775,1262,55502,111,94364,53877,14685,75430,1255,18442,75430,209,94197,32878,2,33245,58582,1230,87326,52987,78429,1261,81640,34,80217,28548,1262,28548,41,52994,69395,53520,1272,93302,1282,32954,62250,1274,48564,1283,83964,23895,1284,26815,62250,1332,27645,31051,16411,78686,1255,87117,19347,1275,19676,60919,2]},"awcbb_yhh_fun143":{"variablePool":{"c_f_561":null,"c_f_562":null,"c_f_563":null,"c_f_564":null,"c_f_565":null,"c_f_567":null,"a
wcbb_yhh_fun144":"awcbb_yhh_fun144"},"zhili":[76874,1327,67122,46773,28602,3410,1265,14453,19269,0,27705,1266,67559,22054,0,39485,81076,1285,57485,32,30554,38571,1286,57485,154,96905,88520,1189,67559,88838,1238,51073,87322,1,1314,80153,1287,64787,58582,1286,88184,44863,1287,54271,55502,137,68107,39485,44863,1286,67604,93063,26645,78686,34,41276,19347,1236,48863,21293,26375,10,57190,23895,319,87117,88520,362,67559,15183,1267,51073,79137,7,23895,1269,16411,81076,1288,68307,29,93302,1270,78429,44,41276,63915,319,17824,93007,1268,78938,71104,87788,56594,97196,319,60664,78686,362,87326,15183,1267,87326,89716,0,91706,56594,57563,1288,64787,70729,1248,87117,23627,57485,1254,15183,1289,920,81640,1272,81640,1276,5449,88184,55502,1248,43015,88542,3410,1254,78686,44,85110,63915,1272,93007,1290,53877,96905,48564,1248,78938,92486,66775,1254,88838,876,83964,36951,1272,36951,34,38023,57190,3410,1248,48863,45071,19347,1254,50359,32,69395,15183,1272,3410,1276,5449,10446,78686,1284,88285,81640,1327,45936,78655,47706,93007,1255,89043,53520,1328,94197,17274,1,36951,1275,67604,33009,2,88520,1275,3823,8600,2,81640,1275,17824,90670,2,81640,1275,21286,85708,2,35304,78686,1230,72613,43953,27705,1256,23895,34,59324,15183,1272,93302,1329,15713,28548,1330,28548,1331,64093,75425,78429,1255,72613,93007,1275,67604,16079,2]},"awcbb_yhh_fun144":{"variablePool":{"c_f_568":null,"c_f_570":null,"c_f_571":null,"c_f_572":null,"c_f_573":null,"c_f_574":null,"awcbb_yhh_fun145":"awcbb_yhh_fun145"},"zhili":[19347,1326,48796,46773,46989,78686,1265,54271,84365,0,62250,1266,19676,90670,0,79904,40830,1291,70729,34,16411,44863,1236,19676,23215,61386,10,88285,63915,319,343,78429,362,18442,66775,1267,68107,95936,7,55502,1269,64787,94617,1292,7838,15,96905,88838,319,14453,23895,362,14453,50359,1267,18442,50136,0,14685,5503,1292,16411,57485,1293,21286,65146,0,44064,34739,1294,99240,93007,1292,21286,62250,1295,35304,48564,1288,343,48564,1295,88285,93302,1285,18442,78686,1295,57190,28548,1291,87326,67423,60598,38765,713
40,82493,63317,48564,1295,28602,93302,1296,88416,89716,2,14685,1500,1297,47706,78686,1287,3823,76730,15183,32,57190,80153,621,67057,53520,621,1101,33245,58582,622,27645,19347,137,17824,73674,96120,51,64787,76874,1298,1314,78686,622,77333,38865,62250,621,89447,60664,75112,28602,57485,1299,48564,32,33245,23895,1287,89447,10760,28548,1298,89043,19676,70729,132,67559,58620,1,75672,33245,27705,1286,28602,58582,1299,90910,57190,28548,1286,72613,92857,47450,28602,6935,621,14720,-65,55502,44,10760,88838,1285,72613,44492,96905,50927,1300,16411,78429,1300,75425,78686,1286,17824,94764,97196,1285,17824,97205,42429,79904,57485,1300,21286,31581,93302,225,45936,60919,1,46989,57485,1301,60664,2112,1,70729,1295,54331,14685,81640,1294,60664,15183,266,14453,57335,1,1729,84035,1302,79904,48564,1326,94197,20807,0,66102,48564,1249,10760,53520,1230,77333,23895,1250,45936,88520,1271,94591,6248,1]},"awcbb_yhh_fun145":{"variablePool":{"c_f_575":null,"c_f_576":null,"awcbb_yhh_fun146":"awcbb_yhh_fun146","c_f_577":null,"c_f_578":null},"zhili":[36951,1307,7580,46773,76874,32,33245,15314,1303,48185,100,5,1,79904,27705,1304,45936,38032,0,30554,40830,1305,93007,1306,41276,55672,1307,87117,30554,93007,1305,18442,36951,1308,68107,20807,2,48564,1309,48564,1310,44863,1311,1729,55502,1305,21286,19347,499,20489,4349,3,10446,97196,1312,14685,62250,1302,67604,64093,88838,1313,88285,62250,1285,72613,64342,50359,1314,1729,76874,1297,88416,920,75425,88520,1315,51073,88838,637,77333,8029,1,39485,50359,1305,87326,88520,1316,54271,64384,1,23895,1317,46989,70729,1305,48863,57485,1318,17824,58539,34089,2,79509,6,1314,57485,1303,78429,34,48910,12237,64787,78686,1319,74663,88290,49386,78686,1320,14685,93302,321,90910,97196,1321,14453,20807,1,99240,81076,1322,81640,34,67057,62250,1303,67559,87491,30014,14,57190,66775,1322,89043,55672,32,60664,93007,1323,55502,1324,66902,18790,12,14685,93007,1322,67604,44863,32,54271,76874,1323,19347,1325,20586]},"awcbb_yhh_fun146":{"variablePool":{},"zhili":[26266,25186]}};function 
cshduei(){this.cf=function(){var t;return t=this.gx[this.s][2],this.s=this.gx[this.s][0],this.CFf-=1,t;};this.cF=function(a){return this.s=this.gx[this.s][1],this.gx[this.s][2]=a,this.CFf+=1,a;};this.sf=function(){var e;if(this.CFf<1){return 10;}return e=this.gx[this.s][2],this.s=this.gx[this.s][0],this.CFf--,this.s=this.gx[this.s][1],this.gx[this.s][2]=e,this.CFf+=1,101;};this.Cf=function(){var sc,h,g=this.s;this.CFf-=1;this.s=this.gx[this.s][0];h=this.gx[g][2];while(!![]){if(g===this.gx["cbb1"][1]){break;}sc=this.gx[this.gx[g][0]][2],this.gx[this.gx[g][0]][2]=h,h=sc,g=this.gx[g][0];}return h;};this.gx={"cbb1":[undefined,"cbb357",undefined],"cbb357":["cbb1","cbb557",undefined],"cbb557":["cbb357","cbb832",undefined],"cbb832":["cbb557","cbb258",undefined],"cbb258":["cbb832","cbb636",undefined],"cbb636":["cbb258","cbb797",undefined],"cbb797":["cbb636","cbb912",undefined],"cbb912":["cbb797","cbb760",undefined],"cbb760":["cbb912","cbb35",undefined],"cbb35":["cbb760","cbb281",undefined],"cbb281":["cbb35","cbb178",undefined],"cbb178":["cbb281","cbb928",undefined],"cbb928":["cbb178","cbb384",undefined],"cbb384":["cbb928","cbb404",undefined],"cbb404":["cbb384","cbb348",undefined],"cbb348":["cbb404","cbb57",undefined],"cbb57":["cbb348","cbb672",undefined],"cbb672":["cbb57","cbb768",undefined],"cbb768":["cbb672","cbb393",undefined],"cbb393":["cbb768","cbb798",undefined],"cbb798":["cbb393","cbb349",undefined],"cbb349":["cbb798","cbb529",undefined],"cbb529":["cbb349","cbb972",undefined],"cbb972":["cbb529","cbb501",undefined],"cbb501":["cbb972","cbb136",undefined],"cbb136":["cbb501","cbb33",undefined],"cbb33":["cbb136","cbb807",undefined],"cbb807":["cbb33","cbb580",undefined],"cbb580":["cbb807","cbb12",undefined],"cbb12":["cbb580","cbb619",undefined],"cbb619":["cbb12","cbb921",undefined],"cbb921":["cbb619","cbb157",undefined],"cbb157":["cbb921","cbb606",undefined],"cbb606":["cbb157","cbb261",undefined],"cbb261":["cbb606","cbb740",undefined],"cbb740":["cbb261","cbb269",undefined]
,"cbb269":["cbb740","cbb481",undefined],"cbb481":["cbb269","cbb16",undefined],"cbb16":["cbb481","cbb435",undefined],"cbb435":["cbb16","cbb225",undefined],"cbb225":["cbb435","cbb560",undefined],"cbb560":["cbb225","cbb8",undefined],"cbb8":["cbb560","cbb446",undefined],"cbb446":["cbb8","cbb814",undefined],"cbb814":["cbb446","cbb264",undefined],"cbb264":["cbb814","cbb936",undefined],"cbb936":["cbb264","cbb823",undefined],"cbb823":["cbb936","cbb79",undefined],"cbb79":["cbb823","cbb71",undefined],"cbb71":["cbb79","cbb718",undefined],"cbb718":["cbb71","cbb515",undefined],"cbb515":["cbb718","cbb769",undefined],"cbb769":["cbb515","cbb472",undefined],"cbb472":["cbb769","cbb18",undefined],"cbb18":["cbb472","cbb655",undefined],"cbb655":["cbb18","cbb396",undefined],"cbb396":["cbb655","cbb176",undefined],"cbb176":["cbb396","cbb847",undefined],"cbb847":["cbb176","cbb169",undefined],"cbb169":["cbb847","cbb892",undefined],"cbb892":["cbb169","cbb924",undefined],"cbb924":["cbb892","cbb456",undefined],"cbb456":["cbb924","cbb134",undefined],"cbb134":["cbb456","cbb528",undefined],"cbb528":["cbb134","cbb118",undefined],"cbb118":["cbb528","cbb657",undefined],"cbb657":["cbb118","cbb783",undefined],"cbb783":["cbb657","cbb963",undefined],"cbb963":["cbb783","cbb174",undefined],"cbb174":["cbb963","cbb274",undefined],"cbb274":["cbb174","cbb621",undefined],"cbb621":["cbb274","cbb450",undefined],"cbb450":["cbb621","cbb819",undefined],"cbb819":["cbb450","cbb699",undefined],"cbb699":["cbb819","cbb392",undefined],"cbb392":["cbb699","cbb191",undefined],"cbb191":["cbb392","cbb5",undefined],"cbb5":["cbb191","cbb729",undefined],"cbb729":["cbb5","cbb558",undefined],"cbb558":["cbb729","cbb271",undefined],"cbb271":["cbb558","cbb747",undefined],"cbb747":["cbb271","cbb497",undefined],"cbb497":["cbb747","cbb510",undefined],"cbb510":["cbb497","cbb463",undefined],"cbb463":["cbb510","cbb800",undefined],"cbb800":["cbb463","cbb362",undefined],"cbb362":["cbb800","cbb891",undefined],"cbb891":["cbb362","cbb709",undefi
ned],"cbb709":["cbb891","cbb296",undefined],"cbb296":["cbb709","cbb413",undefined],"cbb413":["cbb296","cbb812",undefined],"cbb812":["cbb413","cbb190",undefined],"cbb190":["cbb812","cbb91",undefined],"cbb91":["cbb190","cbb41",undefined],"cbb41":["cbb91","cbb762",undefined],"cbb762":["cbb41","cbb227",undefined],"cbb227":["cbb762","cbb863",undefined],"cbb863":["cbb227","cbb614",undefined],"cbb614":["cbb863","cbb519",undefined],"cbb519":["cbb614","cbb774",undefined],"cbb774":["cbb519","cbb726",undefined],"cbb726":["cbb774","cbb50",undefined],"cbb50":["cbb726","cbb868",undefined],"cbb868":["cbb50","cbb860",undefined],"cbb860":["cbb868","cbb696",undefined],"cbb696":["cbb860","cbb922",undefined],"cbb922":["cbb696","cbb517",undefined],"cbb517":["cbb922","cbb626",undefined],"cbb626":["cbb517","cbb897",undefined],"cbb897":["cbb626","cbb165",undefined],"cbb165":["cbb897","cbb513",undefined],"cbb513":["cbb165","cbb537",undefined],"cbb537":["cbb513","cbb238",undefined],"cbb238":["cbb537","cbb980",undefined],"cbb980":["cbb238","cbb81",undefined],"cbb81":["cbb980","cbb658",undefined],"cbb658":["cbb81","cbb123",undefined],"cbb123":["cbb658","cbb745",undefined],"cbb745":["cbb123","cbb527",undefined],"cbb527":["cbb745","cbb679",undefined],"cbb679":["cbb527","cbb228",undefined],"cbb228":["cbb679","cbb480",undefined],"cbb480":["cbb228","cbb22",undefined],"cbb22":["cbb480","cbb751",undefined],"cbb751":["cbb22","cbb954",undefined],"cbb954":["cbb751","cbb764",undefined],"cbb764":["cbb954","cbb741",undefined],"cbb741":["cbb764","cbb566",undefined],"cbb566":["cbb741","cbb28",undefined],"cbb28":["cbb566","cbb786",undefined],"cbb786":["cbb28","cbb115",undefined],"cbb115":["cbb786","cbb425",undefined],"cbb425":["cbb115","cbb911",undefined],"cbb911":["cbb425","cbb535",undefined],"cbb535":["cbb911","cbb873",undefined],"cbb873":["cbb535","cbb383",undefined],"cbb383":["cbb873","cbb358",undefined],"cbb358":["cbb383","cbb903",undefined],"cbb903":["cbb358","cbb52",undefined],"cbb52":["cbb903","cbb623
",undefined],"cbb623":["cbb52","cbb532",undefined],"cbb532":["cbb623","cbb15",undefined],"cbb15":["cbb532","cbb685",undefined],"cbb685":["cbb15","cbb202",undefined],"cbb202":["cbb685","cbb406",undefined],"cbb406":["cbb202","cbb829",undefined],"cbb829":["cbb406","cbb869",undefined],"cbb869":["cbb829","cbb340",undefined],"cbb340":["cbb869","cbb457",undefined],"cbb457":["cbb340","cbb719",undefined],"cbb719":["cbb457","cbb341",undefined],"cbb341":["cbb719","cbb735",undefined],"cbb735":["cbb341","cbb777",undefined],"cbb777":["cbb735","cbb407",undefined],"cbb407":["cbb777","cbb731",undefined],"cbb731":["cbb407","cbb940",undefined],"cbb940":["cbb731","cbb634",undefined],"cbb634":["cbb940","cbb542",undefined],"cbb542":["cbb634","cbb561",undefined],"cbb561":["cbb542","cbb372",undefined],"cbb372":["cbb561","cbb432",undefined],"cbb432":["cbb372","cbb781",undefined],"cbb781":["cbb432","cbb97",undefined],"cbb97":["cbb781","cbb60",undefined],"cbb60":["cbb97","cbb881",undefined],"cbb881":["cbb60","cbb354",undefined],"cbb354":["cbb881","cbb571",undefined],"cbb571":["cbb354","cbb981",undefined],"cbb981":["cbb571","cbb291",undefined],"cbb291":["cbb981","cbb986",undefined],"cbb986":["cbb291","cbb368",undefined],"cbb368":["cbb986","cbb647",undefined],"cbb647":["cbb368","cbb24",undefined],"cbb24":["cbb647","cbb189",undefined],"cbb189":["cbb24","cbb323",undefined],"cbb323":["cbb189","cbb671",undefined],"cbb671":["cbb323","cbb469",undefined],"cbb469":["cbb671","cbb224",undefined],"cbb224":["cbb469","cbb62",undefined],"cbb62":["cbb224","cbb210",undefined],"cbb210":["cbb62","cbb350",undefined],"cbb350":["cbb210","cbb670",undefined],"cbb670":["cbb350","cbb923",undefined],"cbb923":["cbb670","cbb369",undefined],"cbb369":["cbb923","cbb765",undefined],"cbb765":["cbb369","cbb506",undefined],"cbb506":["cbb765","cbb213",undefined],"cbb213":["cbb506","cbb147",undefined],"cbb147":["cbb213","cbb48",undefined],"cbb48":["cbb147","cbb321",undefined],"cbb321":["cbb48","cbb965",undefined],"cbb965":["cbb321
","cbb518",undefined],"cbb518":["cbb965","cbb577",undefined],"cbb577":["cbb518","cbb29",undefined],"cbb29":["cbb577","cbb391",undefined],"cbb391":["cbb29","cbb326",undefined],"cbb326":["cbb391","cbb779",undefined],"cbb779":["cbb326","cbb474",undefined],"cbb474":["cbb779","cbb782",undefined],"cbb782":["cbb474","cbb608",undefined],"cbb608":["cbb782","cbb579",undefined],"cbb579":["cbb608","cbb531",undefined],"cbb531":["cbb579","cbb713",undefined],"cbb713":["cbb531","cbb609",undefined],"cbb609":["cbb713","cbb825",undefined],"cbb825":["cbb609","cbb448",undefined],"cbb448":["cbb825","cbb477",undefined],"cbb477":["cbb448","cbb516",undefined],"cbb516":["cbb477","cbb644",undefined],"cbb644":["cbb516","cbb73",undefined],"cbb73":["cbb644","cbb250",undefined],"cbb250":["cbb73","cbb374",undefined],"cbb374":["cbb250","cbb784",undefined],"cbb784":["cbb374","cbb915",undefined],"cbb915":["cbb784","cbb503",undefined],"cbb503":["cbb915","cbb983",undefined],"cbb983":["cbb503","cbb266",undefined],"cbb266":["cbb983","cbb387",undefined],"cbb387":["cbb266","cbb982",undefined],"cbb982":["cbb387","cbb82",undefined],"cbb82":["cbb982","cbb218",undefined],"cbb218":["cbb82","cbb856",undefined],"cbb856":["cbb218","cbb461",undefined],"cbb461":["cbb856","cbb444",undefined],"cbb444":["cbb461","cbb298",undefined],"cbb298":["cbb444","cbb772",undefined],"cbb772":["cbb298","cbb415",undefined],"cbb415":["cbb772","cbb536",undefined],"cbb536":["cbb415","cbb212",undefined],"cbb212":["cbb536","cbb179",undefined],"cbb179":["cbb212","cbb595",undefined],"cbb595":["cbb179","cbb732",undefined],"cbb732":["cbb595","cbb399",undefined],"cbb399":["cbb732","cbb511",undefined],"cbb511":["cbb399","cbb654",undefined],"cbb654":["cbb511","cbb259",undefined],"cbb259":["cbb654","cbb230",undefined],"cbb230":["cbb259","cbb295",undefined],"cbb295":["cbb230","cbb648",undefined],"cbb648":["cbb295","cbb44",undefined],"cbb44":["cbb648","cbb128",undefined],"cbb128":["cbb44","cbb787",undefined],"cbb787":["cbb128","cbb668",undefined],"
cbb668":["cbb787","cbb172",undefined],"cbb172":["cbb668","cbb285",undefined],"cbb285":["cbb172","cbb943",undefined],"cbb943":["cbb285","cbb294",undefined],"cbb294":["cbb943","cbb389",undefined],"cbb389":["cbb294","cbb846",undefined],"cbb846":["cbb389","cbb327",undefined],"cbb327":["cbb846","cbb901",undefined],"cbb901":["cbb327","cbb674",undefined],"cbb674":["cbb901","cbb796",undefined],"cbb796":["cbb674","cbb549",undefined],"cbb549":["cbb796","cbb144",undefined],"cbb144":["cbb549","cbb591",undefined],"cbb591":["cbb144","cbb328",undefined],"cbb328":["cbb591","cbb206",undefined],"cbb206":["cbb328","cbb142",undefined],"cbb142":["cbb206","cbb188",undefined],"cbb188":["cbb142","cbb701",undefined],"cbb701":["cbb188","cbb293",undefined],"cbb293":["cbb701","cbb441",undefined],"cbb441":["cbb293","cbb544",undefined],"cbb544":["cbb441","cbb833",undefined],"cbb833":["cbb544","cbb196",undefined],"cbb196":["cbb833","cbb236",undefined],"cbb236":["cbb196","cbb681",undefined],"cbb681":["cbb236","cbb953",undefined],"cbb953":["cbb681","cbb650",undefined],"cbb650":["cbb953","cbb820",undefined],"cbb820":["cbb650","cbb394",undefined],"cbb394":["cbb820","cbb412",undefined],"cbb412":["cbb394","cbb821",undefined],"cbb821":["cbb412","cbb447",undefined],"cbb447":["cbb821","cbb543",undefined],"cbb543":["cbb447","cbb834",undefined],"cbb834":["cbb543","cbb487",undefined],"cbb487":["cbb834","cbb961",undefined],"cbb961":["cbb487","cbb478",undefined],"cbb478":["cbb961","cbb25",undefined],"cbb25":["cbb478","cbb602",undefined],"cbb602":["cbb25","cbb669",undefined],"cbb669":["cbb602","cbb716",undefined],"cbb716":["cbb669","cbb667",undefined],"cbb667":["cbb716","cbb305",undefined],"cbb305":["cbb667","cbb951",undefined],"cbb951":["cbb305","cbb160",undefined],"cbb160":["cbb951","cbb795",undefined],"cbb795":["cbb160","cbb145",undefined],"cbb145":["cbb795","cbb770",undefined],"cbb770":["cbb145","cbb255",undefined],"cbb255":["cbb770","cbb788",undefined],"cbb788":["cbb255","cbb999",undefined],"cbb999":["cbb7
88","cbb620",undefined],"cbb620":["cbb999","cbb61",undefined],"cbb61":["cbb620","cbb850",undefined],"cbb850":["cbb61","cbb453",undefined],"cbb453":["cbb850","cbb141",undefined],"cbb141":["cbb453","cbb138",undefined],"cbb138":["cbb141","cbb968",undefined],"cbb968":["cbb138","cbb119",undefined],"cbb119":["cbb968","cbb423",undefined],"cbb423":["cbb119","cbb108",undefined],"cbb108":["cbb423","cbb51",undefined],"cbb51":["cbb108","cbb414",undefined],"cbb414":["cbb51","cbb364",undefined],"cbb364":["cbb414","cbb85",undefined],"cbb85":["cbb364","cbb808",undefined],"cbb808":["cbb85","cbb971",undefined],"cbb971":["cbb808","cbb87",undefined],"cbb87":["cbb971","cbb737",undefined],"cbb737":["cbb87","cbb828",undefined],"cbb828":["cbb737","cbb950",undefined],"cbb950":["cbb828","cbb161",undefined],"cbb161":["cbb950","cbb727",undefined],"cbb727":["cbb161","cbb904",undefined],"cbb904":["cbb727","cbb371",undefined],"cbb371":["cbb904","cbb998",undefined],"cbb998":["cbb371","cbb107",undefined],"cbb107":["cbb998","cbb483",undefined],"cbb483":["cbb107","cbb345",undefined],"cbb345":["cbb483","cbb430",undefined],"cbb430":["cbb345","cbb440",undefined],"cbb440":["cbb430","cbb715",undefined],"cbb715":["cbb440","cbb485",undefined],"cbb485":["cbb715","cbb894",undefined],"cbb894":["cbb485","cbb209",undefined],"cbb209":["cbb894","cbb651",undefined],"cbb651":["cbb209","cbb916",undefined],"cbb916":["cbb651","cbb303",undefined],"cbb303":["cbb916","cbb498",undefined],"cbb498":["cbb303","cbb401",undefined],"cbb401":["cbb498","cbb104",undefined],"cbb104":["cbb401","cbb756",undefined],"cbb756":["cbb104","cbb547",undefined],"cbb547":["cbb756","cbb866",undefined],"cbb866":["cbb547","cbb185",undefined],"cbb185":["cbb866","cbb673",undefined],"cbb673":["cbb185","cbb366",undefined],"cbb366":["cbb673","cbb194",undefined],"cbb194":["cbb366","cbb587",undefined],"cbb587":["cbb194","cbb289",undefined],"cbb289":["cbb587","cbb306",undefined],"cbb306":["cbb289","cbb149",undefined],"cbb149":["cbb306","cbb725",undefined]
,"cbb725":["cbb149","cbb460",undefined],"cbb460":["cbb725","cbb443",undefined],"cbb443":["cbb460","cbb687",undefined],"cbb687":["cbb443","cbb398",undefined],"cbb398":["cbb687","cbb573",undefined],"cbb573":["cbb398","cbb455",undefined],"cbb455":["cbb573","cbb204",undefined],"cbb204":["cbb455","cbb947",undefined],"cbb947":["cbb204","cbb257",undefined],"cbb257":["cbb947","cbb744",undefined],"cbb744":["cbb257","cbb975",undefined],"cbb975":["cbb744","cbb675",undefined],"cbb675":["cbb975","cbb45",undefined],"cbb45":["cbb675","cbb697",undefined],"cbb697":["cbb45","cbb776",undefined],"cbb776":["cbb697","cbb26",undefined],"cbb26":["cbb776","cbb187",undefined],"cbb187":["cbb26","cbb93",undefined],"cbb93":["cbb187","cbb604",undefined],"cbb604":["cbb93","cbb499",undefined],"cbb499":["cbb604","cbb239",undefined],"cbb239":["cbb499","cbb40",undefined],"cbb40":["cbb239","cbb445",undefined],"cbb445":["cbb40","cbb552",undefined],"cbb552":["cbb445","cbb618",undefined],"cbb618":["cbb552","cbb750",undefined],"cbb750":["cbb618","cbb766",undefined],"cbb766":["cbb750","cbb242",undefined],"cbb242":["cbb766","cbb987",undefined],"cbb987":["cbb242","cbb603",undefined],"cbb603":["cbb987","cbb906",undefined],"cbb906":["cbb603","cbb182",undefined],"cbb182":["cbb906","cbb154",undefined],"cbb154":["cbb182","cbb682",undefined],"cbb682":["cbb154","cbb32",undefined],"cbb32":["cbb682","cbb482",undefined],"cbb482":["cbb32","cbb977",undefined],"cbb977":["cbb482","cbb677",undefined],"cbb677":["cbb977","cbb867",undefined],"cbb867":["cbb677","cbb824",undefined],"cbb824":["cbb867","cbb791",undefined],"cbb791":["cbb824","cbb184",undefined],"cbb184":["cbb791","cbb792",undefined],"cbb792":["cbb184","cbb284",undefined],"cbb284":["cbb792","cbb994",undefined],"cbb994":["cbb284","cbb433",undefined],"cbb433":["cbb994","cbb929",undefined],"cbb929":["cbb433","cbb208",undefined],"cbb208":["cbb929","cbb500",undefined],"cbb500":["cbb208","cbb21",undefined],"cbb21":["cbb500","cbb386",undefined],"cbb386":["cbb21","cbb143",
undefined],"cbb143":["cbb386","cbb251",undefined],"cbb251":["cbb143","cbb49",undefined],"cbb49":["cbb251","cbb278",undefined],"cbb278":["cbb49","cbb818",undefined],"cbb818":["cbb278","cbb859",undefined],"cbb859":["cbb818","cbb533",undefined],"cbb533":["cbb859","cbb607",undefined],"cbb607":["cbb533","cbb628",undefined],"cbb628":["cbb607","cbb899",undefined],"cbb899":["cbb628","cbb132",undefined],"cbb132":["cbb899","cbb63",undefined],"cbb63":["cbb132","cbb110",undefined],"cbb110":["cbb63","cbb990",undefined],"cbb990":["cbb110","cbb749",undefined],"cbb749":["cbb990","cbb92",undefined],"cbb92":["cbb749","cbb944",undefined],"cbb944":["cbb92","cbb962",undefined],"cbb962":["cbb944","cbb616",undefined],"cbb616":["cbb962","cbb313",undefined],"cbb313":["cbb616","cbb408",undefined],"cbb408":["cbb313","cbb380",undefined],"cbb380":["cbb408","cbb969",undefined],"cbb969":["cbb380","cbb129",undefined],"cbb129":["cbb969","cbb639",undefined],"cbb639":["cbb129","cbb752",undefined],"cbb752":["cbb639","cbb893",undefined],"cbb893":["cbb752","cbb739",undefined],"cbb739":["cbb893","cbb970",undefined],"cbb970":["cbb739","cbb589",undefined],"cbb589":["cbb970","cbb197",undefined],"cbb197":["cbb589","cbb300",undefined],"cbb300":["cbb197","cbb167",undefined],"cbb167":["cbb300","cbb646",undefined],"cbb646":["cbb167","cbb553",undefined],"cbb553":["cbb646","cbb785",undefined],"cbb785":["cbb553","cbb127",undefined],"cbb127":["cbb785","cbb942",undefined],"cbb942":["cbb127","cbb96",undefined],"cbb96":["cbb942","cbb329",undefined],"cbb329":["cbb96","cbb815",undefined],"cbb815":["cbb329","cbb489",undefined],"cbb489":["cbb815","cbb563",undefined],"cbb563":["cbb489","cbb908",undefined],"cbb908":["cbb563","cbb708",undefined],"cbb708":["cbb908","cbb993",undefined],"cbb993":["cbb708","cbb466",undefined],"cbb466":["cbb993","cbb102",undefined],"cbb102":["cbb466","cbb914",undefined],"cbb914":["cbb102","cbb19",undefined],"cbb19":["cbb914","cbb14",undefined],"cbb14":["cbb19","cbb13",undefined],"cbb13":["cbb14","
cbb493",undefined],"cbb493":["cbb13","cbb76",undefined],"cbb76":["cbb493","cbb979",undefined],"cbb979":["cbb76","cbb767",undefined],"cbb767":["cbb979","cbb3",undefined],"cbb3":["cbb767","cbb882",undefined],"cbb882":["cbb3","cbb205",undefined],"cbb205":["cbb882","cbb249",undefined],"cbb249":["cbb205","cbb361",undefined],"cbb361":["cbb249","cbb666",undefined],"cbb666":["cbb361","cbb66",undefined],"cbb66":["cbb666","cbb661",undefined],"cbb661":["cbb66","cbb333",undefined],"cbb333":["cbb661","cbb438",undefined],"cbb438":["cbb333","cbb754",undefined],"cbb754":["cbb438","cbb37",undefined],"cbb37":["cbb754","cbb409",undefined],"cbb409":["cbb37","cbb889",undefined],"cbb889":["cbb409","cbb355",undefined],"cbb355":["cbb889","cbb479",undefined],"cbb479":["cbb355","cbb967",undefined],"cbb967":["cbb479","cbb331",undefined],"cbb331":["cbb967","cbb55",undefined],"cbb55":["cbb331","cbb156",undefined],"cbb156":["cbb55","cbb265",undefined],"cbb265":["cbb156","cbb162",undefined],"cbb162":["cbb265","cbb436",undefined],"cbb436":["cbb162","cbb275",undefined],"cbb275":["cbb436","cbb20",undefined],"cbb20":["cbb275","cbb858",undefined],"cbb858":["cbb20","cbb991",undefined],"cbb991":["cbb858","cbb320",undefined],"cbb320":["cbb991","cbb793",undefined],"cbb793":["cbb320","cbb11",undefined],"cbb11":["cbb793","cbb43",undefined],"cbb43":["cbb11","cbb405",undefined],"cbb405":["cbb43","cbb491",undefined],"cbb491":["cbb405","cbb34",undefined],"cbb34":["cbb491","cbb870",undefined],"cbb870":["cbb34","cbb151",undefined],"cbb151":["cbb870","cbb884",undefined],"cbb884":["cbb151","cbb841",undefined],"cbb841":["cbb884","cbb581",undefined],"cbb581":["cbb841","cbb692",undefined],"cbb692":["cbb581","cbb945",undefined],"cbb945":["cbb692","cbb54",undefined],"cbb54":["cbb945","cbb42",undefined],"cbb42":["cbb54","cbb153",undefined],"cbb153":["cbb42","cbb70",undefined],"cbb70":["cbb153","cbb694",undefined],"cbb694":["cbb70","cbb171",undefined],"cbb171":["cbb694","cbb548",undefined],"cbb548":["cbb171","cbb538",unde
fined],"cbb538":["cbb548","cbb584",undefined],"cbb584":["cbb538","cbb723",undefined],"cbb723":["cbb584","cbb451",undefined],"cbb451":["cbb723","cbb550",undefined],"cbb550":["cbb451","cbb356",undefined],"cbb356":["cbb550","cbb417",undefined],"cbb417":["cbb356","cbb966",undefined],"cbb966":["cbb417","cbb426",undefined],"cbb426":["cbb966","cbb895",undefined],"cbb895":["cbb426","cbb570",undefined],"cbb570":["cbb895","cbb471",undefined],"cbb471":["cbb570","cbb738",undefined],"cbb738":["cbb471","cbb337",undefined],"cbb337":["cbb738","cbb78",undefined],"cbb78":["cbb337","cbb229",undefined],"cbb229":["cbb78","cbb806",undefined],"cbb806":["cbb229","cbb133",undefined],"cbb133":["cbb806","cbb996",undefined],"cbb996":["cbb133","cbb909",undefined],"cbb909":["cbb996","cbb559",undefined],"cbb559":["cbb909","cbb342",undefined],"cbb342":["cbb559","cbb757",undefined],"cbb757":["cbb342","cbb247",undefined],"cbb247":["cbb757","cbb183",undefined],"cbb183":["cbb247","cbb843",undefined],"cbb843":["cbb183","cbb325",undefined],"cbb325":["cbb843","cbb139",undefined],"cbb139":["cbb325","cbb395",undefined],"cbb395":["cbb139","cbb198",undefined],"cbb198":["cbb395","cbb38",undefined],"cbb38":["cbb198","cbb853",undefined],"cbb853":["cbb38","cbb880",undefined],"cbb880":["cbb853","cbb913",undefined],"cbb913":["cbb880","cbb941",undefined],"cbb941":["cbb913","cbb428",undefined],"cbb428":["cbb941","cbb318",undefined],"cbb318":["cbb428","cbb495",undefined],"cbb495":["cbb318","cbb90",undefined],"cbb90":["cbb495","cbb233",undefined],"cbb233":["cbb90","cbb775",undefined],"cbb775":["cbb233","cbb286",undefined],"cbb286":["cbb775","cbb830",undefined],"cbb830":["cbb286","cbb759",undefined],"cbb759":["cbb830","cbb94",undefined],"cbb94":["cbb759","cbb876",undefined],"cbb876":["cbb94","cbb597",undefined],"cbb597":["cbb876","cbb598",undefined],"cbb598":["cbb597","cbb267",undefined],"cbb267":["cbb598","cbb804",undefined],"cbb804":["cbb267","cbb638",undefined],"cbb638":["cbb804","cbb660",undefined],"cbb660":["cbb63
8","cbb612",undefined],"cbb612":["cbb660","cbb871",undefined],"cbb871":["cbb612","cbb7",undefined],"cbb7":["cbb871","cbb23",undefined],"cbb23":["cbb7","cbb159",undefined],"cbb159":["cbb23","cbb937",undefined],"cbb937":["cbb159","cbb437",undefined],"cbb437":["cbb937","cbb875",undefined],"cbb875":["cbb437","cbb702",undefined],"cbb702":["cbb875","cbb684",undefined],"cbb684":["cbb702","cbb276",undefined],"cbb276":["cbb684","cbb130",undefined],"cbb130":["cbb276","cbb192",undefined],"cbb192":["cbb130","cbb599",undefined],"cbb599":["cbb192","cbb175",undefined],"cbb175":["cbb599","cbb64",undefined],"cbb64":["cbb175","cbb317",undefined],"cbb317":["cbb64","cbb665",undefined],"cbb665":["cbb317","cbb410",undefined],"cbb410":["cbb665","cbb100",undefined],"cbb100":["cbb410","cbb574",undefined],"cbb574":["cbb100","cbb146",undefined],"cbb146":["cbb574","cbb840",undefined],"cbb840":["cbb146","cbb363",undefined],"cbb363":["cbb840","cbb381",undefined],"cbb381":["cbb363","cbb703",undefined],"cbb703":["cbb381","cbb31",undefined],"cbb31":["cbb703","cbb367",undefined],"cbb367":["cbb31","cbb470",undefined],"cbb470":["cbb367","cbb243",undefined],"cbb243":["cbb470","cbb308",undefined],"cbb308":["cbb243","cbb65",undefined],"cbb65":["cbb308","cbb662",undefined],"cbb662":["cbb65","cbb310",undefined],"cbb310":["cbb662","cbb464",undefined],"cbb464":["cbb310","cbb546",undefined],"cbb546":["cbb464","cbb382",undefined],"cbb382":["cbb546","cbb442",undefined],"cbb442":["cbb382","cbb746",undefined],"cbb746":["cbb442","cbb468",undefined],"cbb468":["cbb746","cbb643",undefined],"cbb643":["cbb468","cbb241",undefined],"cbb241":["cbb643","cbb288",undefined],"cbb288":["cbb241","cbb878",undefined],"cbb878":["cbb288","cbb245",undefined],"cbb245":["cbb878","cbb254",undefined],"cbb254":["cbb245","cbb852",undefined],"cbb852":["cbb254","cbb221",undefined],"cbb221":["cbb852","cbb592",undefined],"cbb592":["cbb221","cbb47",undefined],"cbb47":["cbb592","cbb753",undefined],"cbb753":["cbb47","cbb974",undefined],"cbb974":
["cbb753","cbb58",undefined],"cbb58":["cbb974","cbb290",undefined],"cbb290":["cbb58","cbb933",undefined],"cbb933":["cbb290","cbb125",undefined],"cbb125":["cbb933","cbb720",undefined],"cbb720":["cbb125","cbb421",undefined],"cbb421":["cbb720","cbb2",undefined],"cbb2":["cbb421","cbb292",undefined],"cbb292":["cbb2","cbb811",undefined],"cbb811":["cbb292","cbb736",undefined],"cbb736":["cbb811","cbb431",undefined],"cbb431":["cbb736","cbb458",undefined],"cbb458":["cbb431","cbb976",undefined],"cbb976":["cbb458","cbb137",undefined],"cbb137":["cbb976","cbb226",undefined],"cbb226":["cbb137","cbb693",undefined],"cbb693":["cbb226","cbb75",undefined],"cbb75":["cbb693","cbb521",undefined],"cbb521":["cbb75","cbb509",undefined],"cbb509":["cbb521","cbb514",undefined],"cbb514":["cbb509","cbb322",undefined],"cbb322":["cbb514","cbb219",undefined],"cbb219":["cbb322","cbb427",undefined],"cbb427":["cbb219","cbb653",undefined],"cbb653":["cbb427","cbb330",undefined],"cbb330":["cbb653","cbb68",undefined],"cbb68":["cbb330","cbb883",undefined],"cbb883":["cbb68","cbb140",undefined],"cbb140":["cbb883","cbb508",undefined],"cbb508":["cbb140","cbb135",undefined],"cbb135":["cbb508","cbb585",undefined],"cbb585":["cbb135","cbb309",undefined],"cbb309":["cbb585","cbb664",undefined],"cbb664":["cbb309","cbb416",undefined],"cbb416":["cbb664","cbb86",undefined],"cbb86":["cbb416","cbb155",undefined],"cbb155":["cbb86","cbb956",undefined],"cbb956":["cbb155","cbb728",undefined],"cbb728":["cbb956","cbb680",undefined],"cbb680":["cbb728","cbb282",undefined],"cbb282":["cbb680","cbb676",undefined],"cbb676":["cbb282","cbb80",undefined],"cbb80":["cbb676","cbb605",undefined],"cbb605":["cbb80","cbb486",undefined],"cbb486":["cbb605","cbb835",undefined],"cbb835":["cbb486","cbb691",undefined],"cbb691":["cbb835","cbb262",undefined],"cbb262":["cbb691","cbb164",undefined],"cbb164":["cbb262","cbb297",undefined],"cbb297":["cbb164","cbb988",undefined],"cbb988":["cbb297","cbb83",undefined],"cbb83":["cbb988","cbb72",undefined],"cbb7
2":["cbb83","cbb158",undefined],"cbb158":["cbb72","cbb287",undefined],"cbb287":["cbb158","cbb887",undefined],"cbb887":["cbb287","cbb505",undefined],"cbb505":["cbb887","cbb234",undefined],"cbb234":["cbb505","cbb168",undefined],"cbb168":["cbb234","cbb150",undefined],"cbb150":["cbb168","cbb424",undefined],"cbb424":["cbb150","cbb907",undefined],"cbb907":["cbb424","cbb526",undefined],"cbb526":["cbb907","cbb932",undefined],"cbb932":["cbb526","cbb353",undefined],"cbb353":["cbb932","cbb801",undefined],"cbb801":["cbb353","cbb403",undefined],"cbb403":["cbb801","cbb710",undefined],"cbb710":["cbb403","cbb862",undefined],"cbb862":["cbb710","cbb706",undefined],"cbb706":["cbb862","cbb635",undefined],"cbb635":["cbb706","cbb656",undefined],"cbb656":["cbb635","cbb116",undefined],"cbb116":["cbb656","cbb336",undefined],"cbb336":["cbb116","cbb101",undefined],"cbb101":["cbb336","cbb836",undefined],"cbb836":["cbb101","cbb582",undefined],"cbb582":["cbb836","cbb200",undefined],"cbb200":["cbb582","cbb512",undefined],"cbb512":["cbb200","cbb831",undefined],"cbb831":["cbb512","cbb454",undefined],"cbb454":["cbb831","cbb613",undefined],"cbb613":["cbb454","cbb237",undefined],"cbb237":["cbb613","cbb865",undefined],"cbb865":["cbb237","cbb564",undefined],"cbb564":["cbb865","cbb590",undefined],"cbb590":["cbb564","cbb678",undefined],"cbb678":["cbb590","cbb935",undefined],"cbb935":["cbb678","cbb84",undefined],"cbb84":["cbb935","cbb730",undefined],"cbb730":["cbb84","cbb193",undefined],"cbb193":["cbb730","cbb839",undefined],"cbb839":["cbb193","cbb490",undefined],"cbb490":["cbb839","cbb344",undefined],"cbb344":["cbb490","cbb704",undefined],"cbb704":["cbb344","cbb315",undefined],"cbb315":["cbb704","cbb475",undefined],"cbb475":["cbb315","cbb642",undefined],"cbb642":["cbb475","cbb851",undefined],"cbb851":["cbb642","cbb117",undefined],"cbb117":["cbb851","cbb524",undefined],"cbb524":["cbb117","cbb695",undefined],"cbb695":["cbb524","cbb707",undefined],"cbb707":["cbb695","cbb351",undefined],"cbb351":["cbb707","cb
b845",undefined],"cbb845":["cbb351","cbb645",undefined],"cbb645":["cbb845","cbb496",undefined],"cbb496":["cbb645","cbb625",undefined],"cbb625":["cbb496","cbb53",undefined],"cbb53":["cbb625","cbb390",undefined],"cbb390":["cbb53","cbb397",undefined],"cbb397":["cbb390","cbb855",undefined],"cbb855":["cbb397","cbb578",undefined],"cbb578":["cbb855","cbb958",undefined],"cbb958":["cbb578","cbb827",undefined],"cbb827":["cbb958","cbb803",undefined],"cbb803":["cbb827","cbb712",undefined],"cbb712":["cbb803","cbb270",undefined],"cbb270":["cbb712","cbb113",undefined],"cbb113":["cbb270","cbb268",undefined],"cbb268":["cbb113","cbb484",undefined],"cbb484":["cbb268","cbb568",undefined],"cbb568":["cbb484","cbb67",undefined],"cbb67":["cbb568","cbb248",undefined],"cbb248":["cbb67","cbb864",undefined],"cbb864":["cbb248","cbb539",undefined],"cbb539":["cbb864","cbb376",undefined],"cbb376":["cbb539","cbb520",undefined],"cbb520":["cbb376","cbb379",undefined],"cbb379":["cbb520","cbb600",undefined],"cbb600":["cbb379","cbb576",undefined],"cbb576":["cbb600","cbb180",undefined],"cbb180":["cbb576","cbb721",undefined],"cbb721":["cbb180","cbb89",undefined],"cbb89":["cbb721","cbb789",undefined],"cbb789":["cbb89","cbb872",undefined],"cbb872":["cbb789","cbb724",undefined],"cbb724":["cbb872","cbb794",undefined],"cbb794":["cbb724","cbb181",undefined],"cbb181":["cbb794","cbb930",undefined],"cbb930":["cbb181","cbb848",undefined],"cbb848":["cbb930","cbb429",undefined],"cbb429":["cbb848","cbb705",undefined],"cbb705":["cbb429","cbb686",undefined],"cbb686":["cbb705","cbb319",undefined],"cbb319":["cbb686","cbb809",undefined],"cbb809":["cbb319","cbb931",undefined],"cbb931":["cbb809","cbb593",undefined],"cbb593":["cbb931","cbb385",undefined],"cbb385":["cbb593","cbb332",undefined],"cbb332":["cbb385","cbb896",undefined],"cbb896":["cbb332","cbb214",undefined],"cbb214":["cbb896","cbb755",undefined],"cbb755":["cbb214","cbb637",undefined],"cbb637":["cbb755","cbb99",undefined],"cbb99":["cbb637","cbb231",undefined],"cbb2
31":["cbb99","cbb280",undefined],"cbb280":["cbb231","cbb717",undefined],"cbb717":["cbb280","cbb30",undefined],"cbb30":["cbb717","cbb920",undefined],"cbb920":["cbb30","cbb260",undefined],"cbb260":["cbb920","cbb69",undefined],"cbb69":["cbb260","cbb763",undefined],"cbb763":["cbb69","cbb166",undefined],"cbb166":["cbb763","cbb622",undefined],"cbb622":["cbb166","cbb88",undefined],"cbb88":["cbb622","cbb253",undefined],"cbb253":["cbb88","cbb964",undefined],"cbb964":["cbb253","cbb36",undefined],"cbb36":["cbb964","cbb627",undefined],"cbb627":["cbb36","cbb223",undefined],"cbb223":["cbb627","cbb277",undefined],"cbb277":["cbb223","cbb112",undefined],"cbb112":["cbb277","cbb939",undefined],"cbb939":["cbb112","cbb525",undefined],"cbb525":["cbb939","cbb240",undefined],"cbb240":["cbb525","cbb601",undefined],"cbb601":["cbb240","cbb163",undefined],"cbb163":["cbb601","cbb343",undefined],"cbb343":["cbb163","cbb301",undefined],"cbb301":["cbb343","cbb683",undefined],"cbb683":["cbb301","cbb173",undefined],"cbb173":["cbb683","cbb201",undefined],"cbb201":["cbb173","cbb338",undefined],"cbb338":["cbb201","cbb273",undefined],"cbb273":["cbb338","cbb992",undefined],"cbb992":["cbb273","cbb504",undefined],"cbb504":["cbb992","cbb419",undefined],"cbb419":["cbb504","cbb6",undefined],"cbb6":["cbb419","cbb934",undefined],"cbb934":["cbb6","cbb583",undefined],"cbb583":["cbb934","cbb917",undefined],"cbb917":["cbb583","cbb465",undefined],"cbb465":["cbb917","cbb842",undefined],"cbb842":["cbb465","cbb46",undefined],"cbb46":["cbb842","cbb838",undefined],"cbb838":["cbb46","cbb59",undefined],"cbb59":["cbb838","cbb973",undefined],"cbb973":["cbb59","cbb370",undefined],"cbb370":["cbb973","cbb4",undefined],"cbb4":["cbb370","cbb857",undefined],"cbb857":["cbb4","cbb849",undefined],"cbb849":["cbb857","cbb272",undefined],"cbb272":["cbb849","cbb211",undefined],"cbb211":["cbb272","cbb790",undefined],"cbb790":["cbb211","cbb467",undefined],"cbb467":["cbb790","cbb810",undefined],"cbb810":["cbb467","cbb235",undefined],"cbb235"
:["cbb810","cbb373",undefined],"cbb373":["cbb235","cbb984",undefined],"cbb984":["cbb373","cbb388",undefined],"cbb388":["cbb984","cbb312",undefined],"cbb312":["cbb388","cbb311",undefined],"cbb311":["cbb312","cbb541",undefined],"cbb541":["cbb311","cbb120",undefined],"cbb120":["cbb541","cbb105",undefined],"cbb105":["cbb120","cbb959",undefined],"cbb959":["cbb105","cbb507",undefined],"cbb507":["cbb959","cbb698",undefined],"cbb698":["cbb507","cbb556",undefined],"cbb556":["cbb698","cbb985",undefined],"cbb985":["cbb556","cbb652",undefined],"cbb652":["cbb985","cbb861",undefined],"cbb861":["cbb652","cbb805",undefined],"cbb805":["cbb861","cbb649",undefined],"cbb649":["cbb805","cbb459",undefined],"cbb459":["cbb649","cbb743",undefined],"cbb743":["cbb459","cbb700",undefined],"cbb700":["cbb743","cbb109",undefined],"cbb109":["cbb700","cbb837",undefined],"cbb837":["cbb109","cbb452",undefined],"cbb452":["cbb837","cbb946",undefined],"cbb946":["cbb452","cbb874",undefined],"cbb874":["cbb946","cbb347",undefined],"cbb347":["cbb874","cbb659",undefined],"cbb659":["cbb347","cbb195",undefined],"cbb195":["cbb659","cbb640",undefined],"cbb640":["cbb195","cbb948",undefined],"cbb948":["cbb640","cbb778",undefined],"cbb778":["cbb948","cbb844",undefined],"cbb844":["cbb778","cbb569",undefined],"cbb569":["cbb844","cbb283",undefined],"cbb283":["cbb569","cbb199",undefined],"cbb199":["cbb283","cbb9",undefined],"cbb9":["cbb199","cbb688",undefined],"cbb688":["cbb9","cbb817",undefined],"cbb817":["cbb688","cbb316",undefined],"cbb316":["cbb817","cbb106",undefined],"cbb106":["cbb316","cbb890",undefined],"cbb890":["cbb106","cbb377",undefined],"cbb377":["cbb890","cbb10",undefined],"cbb10":["cbb377","cbb630",undefined],"cbb630":["cbb10","cbb534",undefined],"cbb534":["cbb630","cbb492",undefined],"cbb492":["cbb534","cbb617",undefined],"cbb617":["cbb492","cbb748",undefined],"cbb748":["cbb617","cbb378",undefined],"cbb378":["cbb748","cbb375",undefined],"cbb375":["cbb378","cbb186",undefined],"cbb186":["cbb375","cbb575",
undefined],"cbb575":["cbb186","cbb502",undefined],"cbb502":["cbb575","cbb952",undefined],"cbb952":["cbb502","cbb919",undefined],"cbb919":["cbb952","cbb352",undefined],"cbb352":["cbb919","cbb615",undefined],"cbb615":["cbb352","cbb918",undefined],"cbb918":["cbb615","cbb449",undefined],"cbb449":["cbb918","cbb476",undefined],"cbb476":["cbb449","cbb957",undefined],"cbb957":["cbb476","cbb246",undefined],"cbb246":["cbb957","cbb411",undefined],"cbb411":["cbb246","cbb572",undefined],"cbb572":["cbb411","cbb588",undefined],"cbb588":["cbb572","cbb334",undefined],"cbb334":["cbb588","cbb632",undefined],"cbb632":["cbb334","cbb885",undefined],"cbb885":["cbb632","cbb244",undefined],"cbb244":["cbb885","cbb663",undefined],"cbb663":["cbb244","cbb304",undefined],"cbb304":["cbb663","cbb822",undefined],"cbb822":["cbb304","cbb596",undefined],"cbb596":["cbb822","cbb365",undefined],"cbb365":["cbb596","cbb217",undefined],"cbb217":["cbb365","cbb103",undefined],"cbb103":["cbb217","cbb554",undefined],"cbb554":["cbb103","cbb530",undefined],"cbb530":["cbb554","cbb722",undefined],"cbb722":["cbb530","cbb462",undefined],"cbb462":["cbb722","cbb978",undefined],"cbb978":["cbb462","cbb95",undefined],"cbb95":["cbb978","cbb402",undefined],"cbb402":["cbb95","cbb610",undefined],"cbb610":["cbb402","cbb122",undefined],"cbb122":["cbb610","cbb98",undefined],"cbb98":["cbb122","cbb689",undefined],"cbb689":["cbb98","cbb714",undefined],"cbb714":["cbb689","cbb473",undefined],"cbb473":["cbb714","cbb177",undefined],"cbb177":["cbb473","cbb111",undefined],"cbb111":["cbb177","cbb949",undefined],"cbb949":["cbb111","cbb220",undefined],"cbb220":["cbb949","cbb222",undefined],"cbb222":["cbb220","cbb799",undefined],"cbb799":["cbb222","cbb905",undefined],"cbb905":["cbb799","cbb307",undefined],"cbb307":["cbb905","cbb418",undefined],"cbb418":["cbb307","cbb641",undefined],"cbb641":["cbb418","cbb422",undefined],"cbb422":["cbb641","cbb955",undefined],"cbb955":["cbb422","cbb279",undefined],"cbb279":["cbb955","cbb879",undefined],"cbb87
9":["cbb279","cbb567",undefined],"cbb567":["cbb879","cbb540",undefined],"cbb540":["cbb567","cbb252",undefined],"cbb252":["cbb540","cbb360",undefined],"cbb360":["cbb252","cbb551",undefined],"cbb551":["cbb360","cbb434",undefined],"cbb434":["cbb551","cbb711",undefined],"cbb711":["cbb434","cbb359",undefined],"cbb359":["cbb711","cbb216",undefined],"cbb216":["cbb359","cbb925",undefined],"cbb925":["cbb216","cbb494",undefined],"cbb494":["cbb925","cbb960",undefined],"cbb960":["cbb494","cbb522",undefined],"cbb522":["cbb960","cbb594",undefined],"cbb594":["cbb522","cbb586",undefined],"cbb586":["cbb594","cbb56",undefined],"cbb56":["cbb586","cbb773",undefined],"cbb773":["cbb56","cbb523",undefined],"cbb523":["cbb773","cbb131",undefined],"cbb131":["cbb523","cbb888",undefined],"cbb888":["cbb131","cbb910",undefined],"cbb910":["cbb888","cbb215",undefined],"cbb215":["cbb910","cbb886",undefined],"cbb886":["cbb215","cbb734",undefined],"cbb734":["cbb886","cbb124",undefined],"cbb124":["cbb734","cbb74",undefined],"cbb74":["cbb124","cbb17",undefined],"cbb17":["cbb74","cbb152",undefined],"cbb152":["cbb17","cbb611",undefined],"cbb611":["cbb152","cbb555",undefined],"cbb555":["cbb611","cbb545",undefined],"cbb545":["cbb555","cbb816",undefined],"cbb816":["cbb545","cbb565",undefined],"cbb565":["cbb816","cbb813",undefined],"cbb813":["cbb565","cbb207",undefined],"cbb207":["cbb813","cbb761",undefined],"cbb761":["cbb207","cbb802",undefined],"cbb802":["cbb761","cbb339",undefined],"cbb339":["cbb802","cbb898",undefined],"cbb898":["cbb339","cbb900",undefined],"cbb900":["cbb898","cbb926",undefined],"cbb926":["cbb900","cbb126",undefined],"cbb126":["cbb926","cbb758",undefined],"cbb758":["cbb126","cbb624",undefined],"cbb624":["cbb758","cbb335",undefined],"cbb335":["cbb624","cbb302",undefined],"cbb302":["cbb335","cbb927",undefined],"cbb927":["cbb302","cbb170",undefined],"cbb170":["cbb927","cbb400",undefined],"cbb400":["cbb170","cbb299",undefined],"cbb299":["cbb400","cbb826",undefined],"cbb826":["cbb299","cbb562
",undefined],"cbb562":["cbb826","cbb997",undefined],"cbb997":["cbb562","cbb420",undefined],"cbb420":["cbb997","cbb938",undefined],"cbb938":["cbb420","cbb629",undefined],"cbb629":["cbb938","cbb633",undefined],"cbb633":["cbb629","cbb346",undefined],"cbb346":["cbb633","cbb877",undefined],"cbb877":["cbb346","cbb27",undefined],"cbb27":["cbb877","cbb902",undefined],"cbb902":["cbb27","cbb439",undefined],"cbb439":["cbb902","cbb989",undefined],"cbb989":["cbb439","cbb263",undefined],"cbb263":["cbb989","cbb203",undefined],"cbb203":["cbb263","cbb77",undefined],"cbb77":["cbb203","cbb324",undefined],"cbb324":["cbb77","cbb148",undefined],"cbb148":["cbb324","cbb742",undefined],"cbb742":["cbb148","cbb488",undefined],"cbb488":["cbb742","cbb733",undefined],"cbb733":["cbb488","cbb39",undefined],"cbb39":["cbb733","cbb114",undefined],"cbb114":["cbb39","cbb771",undefined],"cbb771":["cbb114","cbb256",undefined],"cbb256":["cbb771","cbb121",undefined],"cbb121":["cbb256","cbb314",undefined],"cbb314":["cbb121","cbb854",undefined],"cbb854":["cbb314","cbb995",undefined],"cbb995":["cbb854","cbb631",undefined],"cbb631":["cbb995","cbb780",undefined],"cbb780":["cbb631","cbb232",undefined],"cbb232":["cbb780","cbb690",undefined],"cbb690":["cbb232","",undefined]};this.s="cbb1";this.CFf=0;}function cltothis(cythis,poolList,off){for(let i in poolList){off==1?cythis[i]=undefined:cythis[i]=poolList[i];}}(function(){var oost=0;var oo4829=10;var oo4828=1;var oo4827=10;var oo4826=10;var oo4825=1;var oo4824=1;var oo4823=10;var oo4822=99911;var oo4821=99960;var oo4820=9;var oo4819=99812;var oo4818=99900;var oo4817=99910;var oo4816=99596;var oo4815=99741;var oo4814=99544;var oo4813=10;var oo4812=99533;var oo4811=99543;var oo4810=99595;var oo4809=99811;var oo4808=9;var oo4807=10;var oo4806=0;var oo4805=0;var oo4804=0;var oo4803=0;var oo4802=0;var oo4801=2;var oo4800=0;var oo4799=2;var oo4798=99256;var oo4797=99518;var oo4796=99042;var oo4795=98964;var oo4794=99041;var oo4793=99255;var oo4792=98705;var 
oo4791=98844;var oo4790=98578;var oo4789=98550;var oo4788=98577;var oo4787=98704;var oo4786=98963;var oo4785=99532;var oo4784=10;var oo4783=1;var oo4782=10;var oo4781=10;var oo4780=1;var oo4779=1;var oo4778=98292;var oo4777=98487;var oo4776=98146;var oo4775=10;var oo4774=0;var oo4773=0;var oo4772=0;var oo4771=0;var oo4770=0;var oo4769=2;var oo4768=0;var oo4767=2;var oo4766=98106;var oo4765=98145;var oo4764=98291;var oo4763=0;var oo4762=1;var oo4761=0;var oo4760=1;var oo4759=0;var oo4758=0;var oo4757=0;var oo4756=97896;var oo4755=98065;var oo4754=97797;var oo4753=97496;var oo4752=97796;var oo4751=97895;var oo4750=98105;var oo4749=0;var oo4748=97257;var oo4747=97472;var oo4746=97206;var oo4745=1;var oo4744=97106;var oo4743=97205;var oo4742=97256;var oo4741=9;var oo4740=10;var oo4739=96906;var oo4738=97012;var oo4737=0;var oo4736=96869;var oo4735=96779;var oo4734=96868;var oo4733=96905;var oo4732=97105;var oo4731=97495;var oo4730=98549;var oo4729=0;var oo4728=1;var oo4727=0;var oo4726=1;var oo4725=0;var oo4724=0;var oo4723=0;var oo4722=96582;var oo4721=96730;var oo4720=96480;var oo4719=96556;var oo4718=96581;var oo4717=96340;var oo4716=96423;var oo4715=0;var oo4714=1;var oo4713=1;var oo4712=0;var oo4711=0;var oo4710=1;var oo4709=0;var oo4708=1;var oo4707=0;var oo4706=1;var oo4705=0;var oo4704=96138;var oo4703=0;var oo4702=1;var oo4701=1;var oo4700=0;var oo4699=0;var oo4698=1;var oo4697=0;var oo4696=1;var oo4695=0;var oo4694=1;var oo4693=0;var oo4692=10;var oo4691=96121;var oo4690=96137;var oo4689=96339;var oo4688=96479;var oo4687=10;var oo4686=9;var oo4685=10;var oo4684=0;var oo4683=0;var oo4682=0;var oo4681=0;var oo4680=0;var oo4679=2;var oo4678=0;var oo4677=2;var oo4676=95975;var oo4675=96108;var oo4674=9;var oo4673=95937;var oo4672=95834;var oo4671=95936;var oo4670=95974;var oo4669=95730;var oo4668=95806;var oo4667=10;var oo4666=95577;var oo4665=95391;var oo4664=95576;var oo4663=95729;var oo4662=95833;var oo4661=96120;var oo4660=95294;var oo4659=95374;var 
oo4658=1;var oo4657=95240;var oo4656=95102;var oo4655=95239;var oo4654=95293;var oo4653=94965;var oo4652=95018;var oo4651=94879;var oo4650=94842;var oo4649=94878;var oo4648=94964;var oo4647=95101;var oo4646=94618;var oo4645=94764;var oo4644=94529;var oo4643=94365;var oo4642=94528;var oo4641=94617;var oo4640=94288;var oo4639=94341;var oo4638=94198;var oo4637=10;var oo4636=94109;var oo4635=94197;var oo4634=94287;var oo4633=94364;var oo4632=94841;var oo4631=95390;var oo4630=96778;var oo4629=0;var oo4628=1;var oo4627=0;var oo4626=1;var oo4625=0;var oo4624=0;var oo4623=0;var oo4622=93830;var oo4621=94082;var oo4620=10;var oo4619=93758;var oo4618=93780;var oo4617=93829;var oo4616=93529;var oo4615=93714;var oo4614=1;var oo4613=93418;var oo4612=93303;var oo4611=93417;var oo4610=93528;var oo4609=93757;var oo4608=10;var oo4607=10;var oo4606=10;var oo4605=93259;var oo4604=93271;var oo4603=93186;var oo4602=93064;var oo4601=93185;var oo4600=93258;var oo4599=93002;var oo4598=93007;var oo4597=92738;var oo4596=10;var oo4595=92616;var oo4594=92737;var oo4593=93001;var oo4592=93063;var oo4591=93302;var oo4590=92473;var oo4589=92486;var oo4588=10;var oo4587=0;var oo4586=1;var oo4585=0;var oo4584=1;var oo4583=0;var oo4582=0;var oo4581=0;var oo4580=92429;var oo4579=10;var oo4578=92311;var oo4577=92428;var oo4576=92472;var oo4575=92235;var oo4574=92283;var oo4573=92142;var oo4572=91994;var oo4571=92141;var oo4570=92234;var oo4569=92310;var oo4568=10;var oo4567=91759;var oo4566=91948;var oo4565=91707;var oo4564=91673;var oo4563=91706;var oo4562=91758;var oo4561=91585;var oo4560=91635;var oo4559=91544;var oo4558=91485;var oo4557=91543;var oo4556=91584;var oo4555=91672;var oo4554=91993;var oo4553=92615;var oo4552=1;var oo4551=91362;var oo4550=91385;var oo4549=91117;var oo4548=91062;var oo4547=91116;var oo4546=91361;var oo4545=90733;var oo4544=90983;var oo4543=0;var oo4542=0;var oo4541=0;var oo4540=90648;var oo4539=1;var oo4538=0;var oo4537=1;var oo4536=0;var oo4535=1;var oo4534=0;var 
oo4533=0;var oo4532=0;var oo4531=90480;var oo4530=90647;var oo4529=90732;var oo4528=91061;var oo4527=90326;var oo4526=90471;var oo4525=90205;var oo4524=10;var oo4523=1;var oo4522=90134;var oo4521=90204;var oo4520=90325;var oo4519=89802;var oo4518=89925;var oo4517=0;var oo4516=0;var oo4515=0;var oo4514=89681;var oo4513=89611;var oo4512=89680;var oo4511=89801;var oo4510=90133;var oo4509=90479;var oo4508=10;var oo4507=89443;var oo4506=89592;var oo4505=89151;var oo4504=0;var oo4503=1;var oo4502=1;var oo4501=0;var oo4500=0;var oo4499=1;var oo4498=0;var oo4497=1;var oo4496=0;var oo4495=1;var oo4494=0;var oo4493=10;var oo4492=89100;var oo4491=89150;var oo4490=89442;var oo4489=88967;var oo4488=89043;var oo4487=88894;var oo4486=88833;var oo4485=88893;var oo4484=88966;var oo4483=89099;var oo4482=88751;var oo4481=88820;var oo4480=88696;var oo4479=88543;var oo4478=88695;var oo4477=88750;var oo4476=88417;var oo4475=88520;var oo4474=88305;var oo4473=88286;var oo4472=88304;var oo4471=88416;var oo4470=88542;var oo4469=88832;var oo4468=89610;var oo4467=91484;var oo4466=94108;var oo4465=88101;var oo4464=88284;var oo4463=10;var oo4462=0;var oo4461=0;var oo4460=0;var oo4459=0;var oo4458=0;var oo4457=2;var oo4456=0;var oo4455=2;var oo4454=87789;var oo4453=88069;var oo4452=88100;var oo4451=1;var oo4450=87631;var oo4449=87779;var oo4448=9;var oo4447=87520;var oo4446=1;var oo4445=87491;var oo4444=87519;var oo4443=87630;var oo4442=87788;var oo4441=0;var oo4440=1;var oo4439=1;var oo4438=0;var oo4437=0;var oo4436=1;var oo4435=0;var oo4434=1;var oo4433=0;var oo4432=1;var oo4431=0;var oo4430=87392;var oo4429=87472;var oo4428=87327;var oo4427=0;var oo4426=0;var oo4425=0;var oo4424=87288;var oo4423=87326;var oo4422=87391;var oo4421=87118;var oo4420=87161;var oo4419=87051;var oo4418=86615;var oo4417=87050;var oo4416=87117;var oo4415=87287;var oo4414=87490;var oo4413=86211;var oo4412=86462;var oo4411=86059;var oo4410=86019;var oo4409=86058;var oo4408=86210;var oo4407=0;var oo4406=0;var 
oo4405=0;var oo4404=85709;var oo4403=86011;var oo4402=85620;var oo4401=0;var oo4400=85599;var oo4399=85619;var oo4398=85708;var oo4397=86018;var oo4396=0;var oo4395=1;var oo4394=1;var oo4393=0;var oo4392=0;var oo4391=1;var oo4390=0;var oo4389=1;var oo4388=0;var oo4387=1;var oo4386=0;var oo4385=85431;var oo4384=85496;var oo4383=10;var oo4382=0;var oo4381=0;var oo4380=0;var oo4379=0;var oo4378=0;var oo4377=2;var oo4376=0;var oo4375=2;var oo4374=85370;var oo4373=85297;var oo4372=85369;var oo4371=85430;var oo4370=10;var oo4369=0;var oo4368=0;var oo4367=0;var oo4366=0;var oo4365=0;var oo4364=2;var oo4363=0;var oo4362=2;var oo4361=85111;var oo4360=85278;var oo4359=0;var oo4358=1;var oo4357=1;var oo4356=0;var oo4355=0;var oo4354=1;var oo4353=0;var oo4352=1;var oo4351=0;var oo4350=1;var oo4349=0;var oo4348=85013;var oo4347=84918;var oo4346=85012;var oo4345=85110;var oo4344=85296;var oo4343=85598;var oo4342=86614;var oo4341=84827;var oo4340=84860;var oo4339=1;var oo4338=10;var oo4337=1;var oo4336=10;var oo4335=10;var oo4334=1;var oo4333=1;var oo4332=84727;var oo4331=84767;var oo4330=84826;var oo4329=84397;var oo4328=84586;var oo4327=0;var oo4326=1;var oo4325=0;var oo4324=1;var oo4323=0;var oo4322=0;var oo4321=0;var oo4320=84251;var oo4319=10;var oo4318=1;var oo4317=10;var oo4316=10;var oo4315=1;var oo4314=1;var oo4313=84100;var oo4312=84250;var oo4311=84396;var oo4310=84726;var oo4309=84036;var oo4308=84082;var oo4307=84009;var oo4306=0;var oo4305=1;var oo4304=1;var oo4303=0;var oo4302=0;var oo4301=1;var oo4300=0;var oo4299=1;var oo4298=0;var oo4297=1;var oo4296=0;var oo4295=83924;var oo4294=84008;var oo4293=84035;var oo4292=10;var oo4291=83862;var oo4290=83912;var oo4289=83834;var oo4288=83719;var oo4287=83833;var oo4286=83861;var oo4285=83923;var oo4284=84099;var oo4283=83458;var oo4282=83608;var oo4281=10;var oo4280=0;var oo4279=0;var oo4278=0;var oo4277=0;var oo4276=0;var oo4275=2;var oo4274=0;var oo4273=2;var oo4272=83429;var oo4271=83097;var oo4270=83428;var 
oo4269=83457;var oo4268=82946;var oo4267=82991;var oo4266=82849;var oo4265=82681;var oo4264=82848;var oo4263=82945;var oo4262=83096;var oo4261=0;var oo4260=1;var oo4259=0;var oo4258=1;var oo4257=0;var oo4256=0;var oo4255=0;var oo4254=82410;var oo4253=82493;var oo4252=10;var oo4251=82334;var oo4250=82148;var oo4249=82333;var oo4248=82409;var oo4247=1;var oo4246=0;var oo4245=82011;var oo4244=82047;var oo4243=1;var oo4242=81641;var oo4241=81463;var oo4240=81640;var oo4239=82010;var oo4238=82147;var oo4237=82680;var oo4236=83718;var oo4235=84917;var oo4234=81390;var oo4233=81432;var oo4232=0;var oo4231=10;var oo4230=81332;var oo4229=81364;var oo4228=81389;var oo4227=1;var oo4226=81213;var oo4225=81239;var oo4224=81178;var oo4223=81046;var oo4222=81177;var oo4221=81212;var oo4220=81331;var oo4219=80832;var oo4218=80999;var oo4217=80821;var oo4216=80677;var oo4215=80820;var oo4214=80831;var oo4213=0;var oo4212=1;var oo4211=1;var oo4210=0;var oo4209=0;var oo4208=1;var oo4207=0;var oo4206=1;var oo4205=0;var oo4204=1;var oo4203=0;var oo4202=10;var oo4201=1;var oo4200=10;var oo4199=10;var oo4198=1;var oo4197=1;var oo4196=80470;var oo4195=80609;var oo4194=80267;var oo4193=80218;var oo4192=80266;var oo4191=80469;var oo4190=80676;var oo4189=81045;var oo4188=79990;var oo4187=80153;var oo4186=10;var oo4185=79764;var oo4184=10;var oo4183=79654;var oo4182=79763;var oo4181=79989;var oo4180=79510;var oo4179=79635;var oo4178=1;var oo4177=79458;var oo4176=9;var oo4175=79138;var oo4174=79457;var oo4173=79509;var oo4172=79653;var oo4171=79057;var oo4170=79124;var oo4169=10;var oo4168=0;var oo4167=0;var oo4166=0;var oo4165=0;var oo4164=0;var oo4163=2;var oo4162=0;var oo4161=2;var oo4160=78997;var oo4159=78888;var oo4158=78996;var oo4157=79056;var oo4156=78764;var oo4155=78865;var oo4154=78656;var oo4153=10;var oo4152=1;var oo4151=10;var oo4150=10;var oo4149=1;var oo4148=1;var oo4147=78599;var oo4146=78655;var oo4145=78763;var oo4144=78887;var oo4143=79137;var oo4142=80217;var 
oo4141=78420;var oo4140=78448;var oo4139=0;var oo4138=1;var oo4137=1;var oo4136=0;var oo4135=0;var oo4134=1;var oo4133=0;var oo4132=1;var oo4131=0;var oo4130=1;var oo4129=0;var oo4128=78154;var oo4127=78039;var oo4126=78153;var oo4125=78419;var oo4124=10;var oo4123=0;var oo4122=77917;var oo4121=78005;var oo4120=77867;var oo4119=77712;var oo4118=77866;var oo4117=77916;var oo4116=78038;var oo4115=77650;var oo4114=77661;var oo4113=1;var oo4112=77500;var oo4111=77314;var oo4110=77499;var oo4109=77649;var oo4108=10;var oo4107=77208;var oo4106=77271;var oo4105=10;var oo4104=76997;var oo4103=9;var oo4102=76924;var oo4101=76996;var oo4100=77207;var oo4099=77313;var oo4098=77711;var oo4097=76816;var oo4096=76874;var oo4095=76670;var oo4094=76649;var oo4093=76669;var oo4092=76815;var oo4091=76519;var oo4090=76624;var oo4089=76386;var oo4088=76146;var oo4087=76385;var oo4086=76518;var oo4085=76648;var oo4084=0;var oo4083=0;var oo4082=0;var oo4081=75980;var oo4080=76039;var oo4079=75936;var oo4078=75774;var oo4077=75935;var oo4076=75979;var oo4075=75454;var oo4074=75712;var oo4073=75431;var oo4072=75410;var oo4071=75430;var oo4070=75453;var oo4069=75773;var oo4068=76145;var oo4067=76923;var oo4066=78598;var oo4065=81462;var oo4064=88285;var oo4063=75239;var oo4062=75338;var oo4061=75113;var oo4060=75170;var oo4059=75238;var oo4058=74963;var oo4057=75055;var oo4056=9;var oo4055=74906;var oo4054=74754;var oo4053=74905;var oo4052=74962;var oo4051=75112;var oo4050=74444;var oo4049=74663;var oo4048=1;var oo4047=74412;var oo4046=74184;var oo4045=74411;var oo4044=74443;var oo4043=0;var oo4042=74085;var oo4041=74158;var oo4040=0;var oo4039=1;var oo4038=1;var oo4037=0;var oo4036=0;var oo4035=1;var oo4034=0;var oo4033=1;var oo4032=0;var oo4031=1;var oo4030=0;var oo4029=74048;var oo4028=73915;var oo4027=74047;var oo4026=74084;var oo4025=74183;var oo4024=74753;var oo4023=73751;var oo4022=73822;var oo4021=73696;var oo4020=73675;var oo4019=73695;var oo4018=73750;var oo4017=10;var 
oo4016=1;var oo4015=10;var oo4014=10;var oo4013=1;var oo4012=1;var oo4011=73422;var oo4010=73670;var oo4009=10;var oo4008=73216;var oo4007=0;var oo4006=0;var oo4005=0;var oo4004=73060;var oo4003=73215;var oo4002=73421;var oo4001=73674;var oo4000=10;var oo3999=72864;var oo3998=73043;var oo3997=72773;var oo3996=72619;var oo3995=72772;var oo3994=72863;var oo3993=72430;var oo3992=72613;var oo3991=72390;var oo3990=72246;var oo3989=72389;var oo3988=72429;var oo3987=72618;var oo3986=73059;var oo3985=73914;var oo3984=72057;var oo3983=72186;var oo3982=0;var oo3981=71826;var oo3980=72036;var oo3979=72056;var oo3978=71767;var oo3977=71787;var oo3976=10;var oo3975=1;var oo3974=10;var oo3973=10;var oo3972=1;var oo3971=1;var oo3970=71662;var oo3969=71618;var oo3968=71661;var oo3967=71766;var oo3966=71825;var oo3965=71440;var oo3964=71593;var oo3963=71384;var oo3962=71309;var oo3961=71383;var oo3960=71439;var oo3959=71146;var oo3958=71255;var oo3957=10;var oo3956=0;var oo3955=0;var oo3954=0;var oo3953=0;var oo3952=0;var oo3951=2;var oo3950=0;var oo3949=2;var oo3948=71105;var oo3947=10;var oo3946=0;var oo3945=0;var oo3944=0;var oo3943=0;var oo3942=0;var oo3941=2;var oo3940=0;var oo3939=2;var oo3938=71006;var oo3937=71104;var oo3936=71145;var oo3935=71308;var oo3934=71617;var oo3933=0;var oo3932=0;var oo3931=0;var oo3930=70730;var oo3929=70959;var oo3928=70625;var oo3927=70582;var oo3926=70624;var oo3925=70729;var oo3924=70303;var oo3923=70484;var oo3922=70288;var oo3921=9;var oo3920=70203;var oo3919=70287;var oo3918=70302;var oo3917=70581;var oo3916=70008;var oo3915=70111;var oo3914=69797;var oo3913=1;var oo3912=69616;var oo3911=69796;var oo3910=70007;var oo3909=69440;var oo3908=69610;var oo3907=10;var oo3906=69396;var oo3905=69333;var oo3904=69395;var oo3903=69439;var oo3902=69615;var oo3901=70202;var oo3900=71005;var oo3899=72245;var oo3898=1;var oo3897=69125;var oo3896=69201;var oo3895=10;var oo3894=0;var oo3893=1;var oo3892=1;var oo3891=0;var oo3890=0;var oo3889=1;var 
oo3888=0;var oo3887=1;var oo3886=0;var oo3885=1;var oo3884=0;var oo3883=68987;var oo3882=69076;var oo3881=69124;var oo3880=10;var oo3879=10;var oo3878=68902;var oo3877=68940;var oo3876=9;var oo3875=68819;var oo3874=10;var oo3873=1;var oo3872=10;var oo3871=10;var oo3870=1;var oo3869=1;var oo3868=68739;var oo3867=68818;var oo3866=68901;var oo3865=68986;var oo3864=68623;var oo3863=68712;var oo3862=68308;var oo3861=68253;var oo3860=68307;var oo3859=68622;var oo3858=68051;var oo3857=68107;var oo3856=67835;var oo3855=10;var oo3854=67827;var oo3853=67834;var oo3852=68050;var oo3851=68252;var oo3850=68738;var oo3849=67636;var oo3848=67763;var oo3847=67605;var oo3846=67489;var oo3845=67604;var oo3844=67635;var oo3843=0;var oo3842=1;var oo3841=1;var oo3840=0;var oo3839=0;var oo3838=1;var oo3837=0;var oo3836=1;var oo3835=0;var oo3834=1;var oo3833=0;var oo3832=67123;var oo3831=67423;var oo3830=67058;var oo3829=1;var oo3828=67029;var oo3827=67057;var oo3826=67122;var oo3825=67488;var oo3824=66830;var oo3823=66923;var oo3822=66776;var oo3821=66712;var oo3820=66775;var oo3819=66829;var oo3818=0;var oo3817=66523;var oo3816=66591;var oo3815=66286;var oo3814=66103;var oo3813=66285;var oo3812=66522;var oo3811=66711;var oo3810=67028;var oo3809=67826;var oo3808=66002;var oo3807=66070;var oo3806=10;var oo3805=65914;var oo3804=65664;var oo3803=65913;var oo3802=66001;var oo3801=65206;var oo3800=65652;var oo3799=0;var oo3798=0;var oo3797=0;var oo3796=65147;var oo3795=1;var oo3794=64846;var oo3793=65146;var oo3792=65205;var oo3791=65663;var oo3790=1;var oo3789=64616;var oo3788=64837;var oo3787=64561;var oo3786=64392;var oo3785=64560;var oo3784=64615;var oo3783=0;var oo3782=0;var oo3781=0;var oo3780=64340;var oo3779=64384;var oo3778=64180;var oo3777=64005;var oo3776=64179;var oo3775=64339;var oo3774=64391;var oo3773=64845;var oo3772=10;var oo3771=63812;var oo3770=63915;var oo3769=1;var oo3768=63675;var oo3767=9;var oo3766=63504;var oo3765=63674;var oo3764=63811;var oo3763=9;var 
oo3762=63366;var oo3761=63488;var oo3760=63222;var oo3759=0;var oo3758=63159;var oo3757=63221;var oo3756=63365;var oo3755=63503;var oo3754=62859;var oo3753=62989;var oo3752=62750;var oo3751=62654;var oo3750=62749;var oo3749=62858;var oo3748=62424;var oo3747=62574;var oo3746=62361;var oo3745=62251;var oo3744=62360;var oo3743=62423;var oo3742=62653;var oo3741=63158;var oo3740=64004;var oo3739=66102;var oo3738=69332;var oo3737=61950;var oo3736=62213;var oo3735=61634;var oo3734=61751;var oo3733=61949;var oo3732=10;var oo3731=61568;var oo3730=61626;var oo3729=61473;var oo3728=10;var oo3727=61387;var oo3726=61472;var oo3725=61567;var oo3724=61633;var oo3723=61222;var oo3722=61295;var oo3721=61160;var oo3720=1;var oo3719=10;var oo3718=61091;var oo3717=61159;var oo3716=61221;var oo3715=0;var oo3714=0;var oo3713=0;var oo3712=60887;var oo3711=60919;var oo3710=60722;var oo3709=60668;var oo3708=60721;var oo3707=60886;var oo3706=61090;var oo3705=61386;var oo3704=60599;var oo3703=60664;var oo3702=10;var oo3701=1;var oo3700=10;var oo3699=10;var oo3698=1;var oo3697=1;var oo3696=60505;var oo3695=60279;var oo3694=60504;var oo3693=60598;var oo3692=60178;var oo3691=60270;var oo3690=60015;var oo3689=59892;var oo3688=60014;var oo3687=60177;var oo3686=60278;var oo3685=10;var oo3684=59813;var oo3683=59858;var oo3682=59697;var oo3681=59666;var oo3680=59696;var oo3679=59812;var oo3678=59461;var oo3677=59553;var oo3676=59325;var oo3675=58908;var oo3674=59324;var oo3673=59460;var oo3672=59665;var oo3671=59891;var oo3670=60667;var oo3669=58816;var oo3668=58904;var oo3667=9;var oo3666=9;var oo3665=58732;var oo3664=0;var oo3663=0;var oo3662=0;var oo3661=58620;var oo3660=58731;var oo3659=58815;var oo3658=58540;var oo3657=58582;var oo3656=9;var oo3655=58449;var oo3654=10;var oo3653=0;var oo3652=0;var oo3651=0;var oo3650=0;var oo3649=0;var oo3648=2;var oo3647=0;var oo3646=2;var oo3645=58405;var oo3644=58448;var oo3643=58539;var oo3642=58619;var oo3641=10;var oo3640=0;var oo3639=58383;var 
oo3638=58386;var oo3637=1;var oo3636=58169;var oo3635=1;var oo3634=1;var oo3633=58002;var oo3632=58168;var oo3631=58382;var oo3630=57564;var oo3629=57870;var oo3628=57486;var oo3627=57423;var oo3626=57485;var oo3625=57563;var oo3624=58001;var oo3623=58404;var oo3622=10;var oo3621=1;var oo3620=10;var oo3619=10;var oo3618=1;var oo3617=1;var oo3616=57398;var oo3615=57420;var oo3614=57339;var oo3613=0;var oo3612=0;var oo3611=0;var oo3610=57259;var oo3609=57338;var oo3608=57397;var oo3607=57183;var oo3606=57221;var oo3605=57065;var oo3604=0;var oo3603=1;var oo3602=1;var oo3601=0;var oo3600=0;var oo3599=1;var oo3598=0;var oo3597=1;var oo3596=0;var oo3595=1;var oo3594=0;var oo3593=10;var oo3592=1;var oo3591=10;var oo3590=10;var oo3589=1;var oo3588=1;var oo3587=56713;var oo3586=57064;var oo3585=57182;var oo3584=57258;var oo3583=56573;var oo3582=56594;var oo3581=10;var oo3580=0;var oo3579=0;var oo3578=0;var oo3577=0;var oo3576=0;var oo3575=2;var oo3574=0;var oo3573=2;var oo3572=56329;var oo3571=0;var oo3570=1;var oo3569=0;var oo3568=1;var oo3567=0;var oo3566=0;var oo3565=0;var oo3564=56023;var oo3563=56328;var oo3562=56572;var oo3561=10;var oo3560=55887;var oo3559=56001;var oo3558=55838;var oo3557=55736;var oo3556=55837;var oo3555=55886;var oo3554=56022;var oo3553=56712;var oo3552=57422;var oo3551=58907;var oo3550=55645;var oo3549=55698;var oo3548=55403;var oo3547=55553;var oo3546=55644;var oo3545=0;var oo3544=1;var oo3543=0;var oo3542=1;var oo3541=0;var oo3540=0;var oo3539=0;var oo3538=55211;var oo3537=55376;var oo3536=0;var oo3535=1;var oo3534=1;var oo3533=0;var oo3532=0;var oo3531=1;var oo3530=0;var oo3529=1;var oo3528=0;var oo3527=1;var oo3526=0;var oo3525=55070;var oo3524=10;var oo3523=0;var oo3522=0;var oo3521=0;var oo3520=0;var oo3519=0;var oo3518=2;var oo3517=0;var oo3516=2;var oo3515=55008;var oo3514=55069;var oo3513=55210;var oo3512=55402;var oo3511=54902;var oo3510=54996;var oo3509=54704;var oo3508=54569;var oo3507=54703;var oo3506=54901;var oo3505=54442;var 
oo3504=54529;var oo3503=54332;var oo3502=54272;var oo3501=54331;var oo3500=54441;var oo3499=54568;var oo3498=55007;var oo3497=54098;var oo3496=54239;var oo3495=53633;var oo3494=53521;var oo3493=53632;var oo3492=54097;var oo3491=53369;var oo3490=53519;var oo3489=10;var oo3488=0;var oo3487=0;var oo3486=0;var oo3485=0;var oo3484=0;var oo3483=2;var oo3482=0;var oo3481=2;var oo3480=53310;var oo3479=53206;var oo3478=53309;var oo3477=53368;var oo3476=53520;var oo3475=52995;var oo3474=53010;var oo3473=1;var oo3472=52985;var oo3471=52927;var oo3470=52984;var oo3469=52994;var oo3468=52824;var oo3467=52886;var oo3466=1;var oo3465=52752;var oo3464=52685;var oo3463=52751;var oo3462=52823;var oo3461=52926;var oo3460=53205;var oo3459=54271;var oo3458=52388;var oo3457=52508;var oo3456=52058;var oo3455=51706;var oo3454=52057;var oo3453=52387;var oo3452=51512;var oo3451=51620;var oo3450=51411;var oo3449=51330;var oo3448=51410;var oo3447=51511;var oo3446=51705;var oo3445=9;var oo3444=0;var oo3443=51074;var oo3442=51219;var oo3441=51023;var oo3440=50934;var oo3439=51022;var oo3438=51073;var oo3437=50812;var oo3436=50927;var oo3435=10;var oo3434=10;var oo3433=0;var oo3432=0;var oo3431=0;var oo3430=0;var oo3429=0;var oo3428=2;var oo3427=0;var oo3426=2;var oo3425=50739;var oo3424=50664;var oo3423=50738;var oo3422=50811;var oo3421=50933;var oo3420=51329;var oo3419=0;var oo3418=1;var oo3417=50616;var oo3416=50640;var oo3415=1;var oo3414=50398;var oo3413=50343;var oo3412=50397;var oo3411=50615;var oo3410=10;var oo3409=1;var oo3408=10;var oo3407=10;var oo3406=1;var oo3405=1;var oo3404=50254;var oo3403=50339;var oo3402=0;var oo3401=0;var oo3400=0;var oo3399=50137;var oo3398=50020;var oo3397=50136;var oo3396=50253;var oo3395=50342;var oo3394=49905;var oo3393=49988;var oo3392=49832;var oo3391=49610;var oo3390=49831;var oo3389=49904;var oo3388=1;var oo3387=49571;var oo3386=49592;var oo3385=49381;var oo3384=0;var oo3383=1;var oo3382=1;var oo3381=0;var oo3380=0;var oo3379=1;var oo3378=0;var 
oo3377=1;var oo3376=0;var oo3375=1;var oo3374=0;var oo3373=49360;var oo3372=49380;var oo3371=49570;var oo3370=49609;var oo3369=50019;var oo3368=50663;var oo3367=52684;var oo3366=55735;var oo3365=62250;var oo3364=75409;var oo3363=49110;var oo3362=49221;var oo3361=48962;var oo3360=49058;var oo3359=49109;var oo3358=0;var oo3357=1;var oo3356=1;var oo3355=0;var oo3354=0;var oo3353=1;var oo3352=0;var oo3351=1;var oo3350=0;var oo3349=1;var oo3348=0;var oo3347=48797;var oo3346=48910;var oo3345=10;var oo3344=0;var oo3343=0;var oo3342=0;var oo3341=0;var oo3340=0;var oo3339=2;var oo3338=0;var oo3337=2;var oo3336=48584;var oo3335=48565;var oo3334=48583;var oo3333=48796;var oo3332=48961;var oo3331=48374;var oo3330=48471;var oo3329=10;var oo3328=1;var oo3327=10;var oo3326=10;var oo3325=1;var oo3324=1;var oo3323=48114;var oo3322=47907;var oo3321=48113;var oo3320=48373;var oo3319=0;var oo3318=47793;var oo3317=47904;var oo3316=10;var oo3315=10;var oo3314=47719;var oo3313=0;var oo3312=1;var oo3311=0;var oo3310=1;var oo3309=0;var oo3308=0;var oo3307=0;var oo3306=47626;var oo3305=47718;var oo3304=47792;var oo3303=47906;var oo3302=48564;var oo3301=0;var oo3300=1;var oo3299=1;var oo3298=0;var oo3297=0;var oo3296=1;var oo3295=0;var oo3294=1;var oo3293=0;var oo3292=1;var oo3291=0;var oo3290=47542;var oo3289=47616;var oo3288=1;var oo3287=47474;var oo3286=10;var oo3285=47451;var oo3284=47473;var oo3283=47541;var oo3282=47176;var oo3281=47257;var oo3280=1;var oo3279=1;var oo3278=47045;var oo3277=46996;var oo3276=47044;var oo3275=47175;var oo3274=47450;var oo3273=10;var oo3272=0;var oo3271=0;var oo3270=0;var oo3269=0;var oo3268=0;var oo3267=2;var oo3266=0;var oo3265=2;var oo3264=46882;var oo3263=46989;var oo3262=0;var oo3261=46774;var oo3260=46721;var oo3259=46773;var oo3258=46881;var oo3257=1;var oo3256=46547;var oo3255=46696;var oo3254=46514;var oo3253=1;var oo3252=46465;var oo3251=46513;var oo3250=46546;var oo3249=46720;var oo3248=46995;var oo3247=47625;var oo3246=1;var oo3245=46363;var 
oo3244=46431;var oo3243=45970;var oo3242=46358;var oo3241=46362;var oo3240=45888;var oo3239=45958;var oo3238=10;var oo3237=45770;var oo3236=45702;var oo3235=45769;var oo3234=45887;var oo3233=45969;var oo3232=45526;var oo3231=45692;var oo3230=45465;var oo3229=45257;var oo3228=45464;var oo3227=45525;var oo3226=45162;var oo3225=45243;var oo3224=45072;var oo3223=44998;var oo3222=45071;var oo3221=45161;var oo3220=45256;var oo3219=45701;var oo3218=44839;var oo3217=44925;var oo3216=44775;var oo3215=44650;var oo3214=44774;var oo3213=44838;var oo3212=1;var oo3211=44499;var oo3210=44601;var oo3209=44460;var oo3208=44239;var oo3207=44459;var oo3206=44498;var oo3205=44649;var oo3204=10;var oo3203=1;var oo3202=10;var oo3201=10;var oo3200=1;var oo3199=1;var oo3198=44204;var oo3197=44222;var oo3196=44065;var oo3195=43848;var oo3194=44064;var oo3193=44203;var oo3192=1;var oo3191=43753;var oo3190=43838;var oo3189=10;var oo3188=1;var oo3187=10;var oo3186=10;var oo3185=1;var oo3184=1;var oo3183=43638;var oo3182=43287;var oo3181=43637;var oo3180=43752;var oo3179=43847;var oo3178=44238;var oo3177=44997;var oo3176=46464;var oo3175=0;var oo3174=1;var oo3173=0;var oo3172=1;var oo3171=0;var oo3170=0;var oo3169=0;var oo3168=43234;var oo3167=43241;var oo3166=9;var oo3165=43029;var oo3164=43178;var oo3163=43233;var oo3162=1;var oo3161=43003;var oo3160=43022;var oo3159=10;var oo3158=0;var oo3157=0;var oo3156=0;var oo3155=0;var oo3154=0;var oo3153=2;var oo3152=0;var oo3151=2;var oo3150=42957;var oo3149=42889;var oo3148=42956;var oo3147=43002;var oo3146=43028;var oo3145=10;var oo3144=0;var oo3143=0;var oo3142=0;var oo3141=0;var oo3140=0;var oo3139=2;var oo3138=0;var oo3137=2;var oo3136=42871;var oo3135=42883;var oo3134=0;var oo3133=1;var oo3132=1;var oo3131=0;var oo3130=0;var oo3129=1;var oo3128=0;var oo3127=1;var oo3126=0;var oo3125=1;var oo3124=0;var oo3123=42622;var oo3122=42434;var oo3121=42621;var oo3120=42870;var oo3119=42332;var oo3118=42429;var oo3117=42292;var oo3116=42125;var 
oo3115=42291;var oo3114=42331;var oo3113=42433;var oo3112=42888;var oo3111=41722;var oo3110=42071;var oo3109=10;var oo3108=0;var oo3107=0;var oo3106=0;var oo3105=0;var oo3104=0;var oo3103=2;var oo3102=0;var oo3101=2;var oo3100=41647;var oo3099=41498;var oo3098=41646;var oo3097=41721;var oo3096=41344;var oo3095=41423;var oo3094=41315;var oo3093=41291;var oo3092=41314;var oo3091=41343;var oo3090=41497;var oo3089=41126;var oo3088=41276;var oo3087=41097;var oo3086=41053;var oo3085=41096;var oo3084=41125;var oo3083=40847;var oo3082=41006;var oo3081=40832;var oo3080=1;var oo3079=40828;var oo3078=40831;var oo3077=40846;var oo3076=41052;var oo3075=41290;var oo3074=42124;var oo3073=0;var oo3072=10;var oo3071=40757;var oo3070=40793;var oo3069=40736;var oo3068=1;var oo3067=1;var oo3066=40721;var oo3065=40735;var oo3064=40756;var oo3063=1;var oo3062=40574;var oo3061=40628;var oo3060=0;var oo3059=1;var oo3058=0;var oo3057=1;var oo3056=0;var oo3055=0;var oo3054=0;var oo3053=40435;var oo3052=40376;var oo3051=40434;var oo3050=40573;var oo3049=40720;var oo3048=0;var oo3047=1;var oo3046=1;var oo3045=0;var oo3044=0;var oo3043=1;var oo3042=0;var oo3041=1;var oo3040=0;var oo3039=1;var oo3038=0;var oo3037=40278;var oo3036=40332;var oo3035=0;var oo3034=40218;var oo3033=40141;var oo3032=40217;var oo3031=40277;var oo3030=0;var oo3029=1;var oo3028=0;var oo3027=1;var oo3026=0;var oo3025=0;var oo3024=0;var oo3023=10;var oo3022=40009;var oo3021=40103;var oo3020=39997;var oo3019=1;var oo3018=39931;var oo3017=39996;var oo3016=40008;var oo3015=40140;var oo3014=40375;var oo3013=9;var oo3012=39785;var oo3011=39897;var oo3010=10;var oo3009=39486;var oo3008=39440;var oo3007=39485;var oo3006=39784;var oo3005=0;var oo3004=1;var oo3003=0;var oo3002=1;var oo3001=0;var oo3000=0;var oo2999=0;var oo2998=39226;var oo2997=39406;var oo2996=39054;var oo2995=38824;var oo2994=39053;var oo2993=39225;var oo2992=39439;var oo2991=10;var oo2990=38676;var oo2989=38780;var oo2988=38581;var oo2987=38492;var 
oo2986=38580;var oo2985=38675;var oo2984=0;var oo2983=1;var oo2982=1;var oo2981=0;var oo2980=0;var oo2979=1;var oo2978=0;var oo2977=1;var oo2976=0;var oo2975=1;var oo2974=0;var oo2973=38303;var oo2972=38452;var oo2971=38118;var oo2970=0;var oo2969=1;var oo2968=0;var oo2967=1;var oo2966=0;var oo2965=0;var oo2964=0;var oo2963=38024;var oo2962=38117;var oo2961=38302;var oo2960=38491;var oo2959=38823;var oo2958=39930;var oo2957=40827;var oo2956=43286;var oo2955=37889;var oo2954=37906;var oo2953=37738;var oo2952=37832;var oo2951=37888;var oo2950=0;var oo2949=1;var oo2948=0;var oo2947=1;var oo2946=0;var oo2945=0;var oo2944=0;var oo2943=37289;var oo2942=37673;var oo2941=37212;var oo2940=37187;var oo2939=37211;var oo2938=37288;var oo2937=37737;var oo2936=37003;var oo2935=37016;var oo2934=10;var oo2933=36904;var oo2932=36693;var oo2931=36903;var oo2930=37002;var oo2929=36548;var oo2928=36568;var oo2927=36412;var oo2926=10;var oo2925=36369;var oo2924=36411;var oo2923=36547;var oo2922=36692;var oo2921=37186;var oo2920=10;var oo2919=36107;var oo2918=36323;var oo2917=36026;var oo2916=0;var oo2915=35732;var oo2914=36025;var oo2913=36106;var oo2912=0;var oo2911=1;var oo2910=0;var oo2909=1;var oo2908=0;var oo2907=0;var oo2906=0;var oo2905=35578;var oo2904=35654;var oo2903=10;var oo2902=35430;var oo2901=35344;var oo2900=35429;var oo2899=35577;var oo2898=35731;var oo2897=34970;var oo2896=35304;var oo2895=34931;var oo2894=0;var oo2893=1;var oo2892=0;var oo2891=1;var oo2890=0;var oo2889=0;var oo2888=0;var oo2887=34793;var oo2886=34930;var oo2885=34969;var oo2884=10;var oo2883=0;var oo2882=0;var oo2881=0;var oo2880=0;var oo2879=0;var oo2878=2;var oo2877=0;var oo2876=2;var oo2875=34702;var oo2874=34739;var oo2873=34417;var oo2872=34239;var oo2871=34416;var oo2870=34701;var oo2869=34792;var oo2868=35343;var oo2867=36368;var oo2866=10;var oo2865=9;var oo2864=34090;var oo2863=34180;var oo2862=34027;var oo2861=34049;var oo2860=34089;var oo2859=33929;var oo2858=34018;var oo2857=1;var 
oo2856=33813;var oo2855=33718;var oo2854=33812;var oo2853=33928;var oo2852=34026;var oo2851=33443;var oo2850=33666;var oo2849=0;var oo2848=0;var oo2847=0;var oo2846=33311;var oo2845=10;var oo2844=33223;var oo2843=33310;var oo2842=33442;var oo2841=0;var oo2840=0;var oo2839=0;var oo2838=33010;var oo2837=33216;var oo2836=0;var oo2835=1;var oo2834=0;var oo2833=1;var oo2832=0;var oo2831=0;var oo2830=0;var oo2829=32955;var oo2828=0;var oo2827=0;var oo2826=0;var oo2825=32879;var oo2824=32954;var oo2823=33009;var oo2822=33222;var oo2821=33717;var oo2820=9;var oo2819=32636;var oo2818=32865;var oo2817=32567;var oo2816=32537;var oo2815=32566;var oo2814=32635;var oo2813=32352;var oo2812=32512;var oo2811=10;var oo2810=32127;var oo2809=31977;var oo2808=32126;var oo2807=32351;var oo2806=32536;var oo2805=31618;var oo2804=31848;var oo2803=31573;var oo2802=31164;var oo2801=31572;var oo2800=31617;var oo2799=31052;var oo2798=31069;var oo2797=30911;var oo2796=0;var oo2795=1;var oo2794=1;var oo2793=0;var oo2792=0;var oo2791=1;var oo2790=0;var oo2789=1;var oo2788=0;var oo2787=1;var oo2786=0;var oo2785=30843;var oo2784=30910;var oo2783=31051;var oo2782=31163;var oo2781=31976;var oo2780=32878;var oo2779=34238;var oo2778=0;var oo2777=1;var oo2776=1;var oo2775=0;var oo2774=0;var oo2773=1;var oo2772=0;var oo2771=1;var oo2770=0;var oo2769=1;var oo2768=0;var oo2767=1;var oo2766=30626;var oo2765=30693;var oo2764=30579;var oo2763=30624;var oo2762=30625;var oo2761=30479;var oo2760=30554;var oo2759=30204;var oo2758=30188;var oo2757=30203;var oo2756=30478;var oo2755=30578;var oo2754=10;var oo2753=30100;var oo2752=30124;var oo2751=30054;var oo2750=9;var oo2749=10;var oo2748=29988;var oo2747=30053;var oo2746=30099;var oo2745=10;var oo2744=29879;var oo2743=29915;var oo2742=10;var oo2741=1;var oo2740=10;var oo2739=10;var oo2738=1;var oo2737=1;var oo2736=29816;var oo2735=29780;var oo2734=29815;var oo2733=29878;var oo2732=29987;var oo2731=30187;var oo2730=10;var oo2729=1;var oo2728=29661;var 
oo2727=29697;var oo2726=10;var oo2725=29299;var oo2724=29196;var oo2723=29298;var oo2722=29660;var oo2721=28903;var oo2720=29108;var oo2719=28797;var oo2718=28742;var oo2717=28796;var oo2716=28902;var oo2715=29195;var oo2714=28617;var oo2713=28718;var oo2712=28549;var oo2711=28435;var oo2710=28548;var oo2709=28616;var oo2708=1;var oo2707=0;var oo2706=1;var oo2705=0;var oo2704=1;var oo2703=0;var oo2702=0;var oo2701=0;var oo2700=28405;var oo2699=28419;var oo2698=10;var oo2697=1;var oo2696=10;var oo2695=10;var oo2694=1;var oo2693=1;var oo2692=0;var oo2691=1;var oo2690=1;var oo2689=0;var oo2688=0;var oo2687=1;var oo2686=0;var oo2685=1;var oo2684=0;var oo2683=1;var oo2682=0;var oo2681=28320;var oo2680=9;var oo2679=28274;var oo2678=28319;var oo2677=28404;var oo2676=28434;var oo2675=28741;var oo2674=29779;var oo2673=1;var oo2672=0;var oo2671=0;var oo2670=0;var oo2669=27941;var oo2668=28145;var oo2667=1;var oo2666=27778;var oo2665=0;var oo2664=1;var oo2663=0;var oo2662=1;var oo2661=0;var oo2660=0;var oo2659=0;var oo2658=27706;var oo2657=27777;var oo2656=27940;var oo2655=10;var oo2654=0;var oo2653=0;var oo2652=0;var oo2651=0;var oo2650=0;var oo2649=2;var oo2648=0;var oo2647=2;var oo2646=27646;var oo2645=27691;var oo2644=0;var oo2643=1;var oo2642=1;var oo2641=0;var oo2640=0;var oo2639=1;var oo2638=0;var oo2637=1;var oo2636=0;var oo2635=1;var oo2634=0;var oo2633=27581;var oo2632=27509;var oo2631=27580;var oo2630=27645;var oo2629=27705;var oo2628=27448;var oo2627=27471;var oo2626=27368;var oo2625=0;var oo2624=0;var oo2623=0;var oo2622=27030;var oo2621=27367;var oo2620=27447;var oo2619=26816;var oo2618=27026;var oo2617=10;var oo2616=1;var oo2615=10;var oo2614=10;var oo2613=1;var oo2612=1;var oo2611=26715;var oo2610=26646;var oo2609=26714;var oo2608=26815;var oo2607=27029;var oo2606=27508;var oo2605=26471;var oo2604=26550;var oo2603=10;var oo2602=26355;var oo2601=0;var oo2600=1;var oo2599=1;var oo2598=0;var oo2597=0;var oo2596=1;var oo2595=0;var oo2594=1;var oo2593=0;var 
oo2592=1;var oo2591=0;var oo2590=26267;var oo2589=26354;var oo2588=26470;var oo2587=1;var oo2586=25988;var oo2585=26108;var oo2584=25961;var oo2583=0;var oo2582=25891;var oo2581=25960;var oo2580=25987;var oo2579=26266;var oo2578=10;var oo2577=25716;var oo2576=25887;var oo2575=25428;var oo2574=0;var oo2573=25187;var oo2572=25427;var oo2571=25715;var oo2570=24975;var oo2569=25115;var oo2568=10;var oo2567=0;var oo2566=0;var oo2565=0;var oo2564=0;var oo2563=0;var oo2562=2;var oo2561=0;var oo2560=2;var oo2559=24953;var oo2558=1;var oo2557=10;var oo2556=1;var oo2555=10;var oo2554=10;var oo2553=1;var oo2552=1;var oo2551=24755;var oo2550=24952;var oo2549=24974;var oo2548=25186;var oo2547=25890;var oo2546=26645;var oo2545=28273;var oo2544=30842;var oo2543=38023;var oo2542=24490;var oo2541=24666;var oo2540=10;var oo2539=24228;var oo2538=24413;var oo2537=24489;var oo2536=10;var oo2535=24067;var oo2534=24215;var oo2533=23942;var oo2532=23771;var oo2531=23941;var oo2530=24066;var oo2529=24227;var oo2528=23616;var oo2527=23732;var oo2526=23353;var oo2525=10;var oo2524=0;var oo2523=1;var oo2522=0;var oo2521=1;var oo2520=0;var oo2519=0;var oo2518=0;var oo2517=23261;var oo2516=23352;var oo2515=23615;var oo2514=10;var oo2513=23020;var oo2512=23215;var oo2511=22961;var oo2510=22860;var oo2509=22960;var oo2508=23019;var oo2507=23260;var oo2506=23770;var oo2505=22554;var oo2504=22715;var oo2503=22504;var oo2502=22440;var oo2501=22503;var oo2500=22553;var oo2499=22362;var oo2498=22385;var oo2497=10;var oo2496=22238;var oo2495=0;var oo2494=0;var oo2493=0;var oo2492=22055;var oo2491=22237;var oo2490=22361;var oo2489=22439;var oo2488=10;var oo2487=1;var oo2486=10;var oo2485=10;var oo2484=1;var oo2483=1;var oo2482=21817;var oo2481=22052;var oo2480=9;var oo2479=21787;var oo2478=21685;var oo2477=21786;var oo2476=21816;var oo2475=10;var oo2474=10;var oo2473=21558;var oo2472=21660;var oo2471=21503;var oo2470=21368;var oo2469=21502;var oo2468=21557;var oo2467=21684;var oo2466=22054;var 
oo2465=22859;var oo2464=21211;var oo2463=21293;var oo2462=21147;var oo2461=21160;var oo2460=21210;var oo2459=10;var oo2458=21082;var oo2457=21144;var oo2456=20932;var oo2455=0;var oo2454=0;var oo2453=0;var oo2452=20808;var oo2451=20931;var oo2450=21081;var oo2449=21146;var oo2448=20382;var oo2447=20586;var oo2446=20337;var oo2445=20168;var oo2444=20336;var oo2443=20381;var oo2442=1;var oo2441=20084;var oo2440=20093;var oo2439=19897;var oo2438=10;var oo2437=0;var oo2436=0;var oo2435=0;var oo2434=0;var oo2433=0;var oo2432=2;var oo2431=0;var oo2430=2;var oo2429=0;var oo2428=1;var oo2427=0;var oo2426=1;var oo2425=0;var oo2424=0;var oo2423=0;var oo2422=19821;var oo2421=19896;var oo2420=20083;var oo2419=20167;var oo2418=20807;var oo2417=19511;var oo2416=19676;var oo2415=19382;var oo2414=10;var oo2413=19364;var oo2412=19381;var oo2411=19510;var oo2410=19346;var oo2409=19349;var oo2408=19294;var oo2407=0;var oo2406=1;var oo2405=0;var oo2404=1;var oo2403=0;var oo2402=0;var oo2401=0;var oo2400=19215;var oo2399=19293;var oo2398=19345;var oo2397=19363;var oo2396=19047;var oo2395=19110;var oo2394=18983;var oo2393=18851;var oo2392=18982;var oo2391=19046;var oo2390=18715;var oo2389=18832;var oo2388=10;var oo2387=1;var oo2386=10;var oo2385=10;var oo2384=1;var oo2383=1;var oo2382=18509;var oo2381=18470;var oo2380=18508;var oo2379=18714;var oo2378=18850;var oo2377=19214;var oo2376=19820;var oo2375=21367;var oo2374=18386;var oo2373=18462;var oo2372=18155;var oo2371=18310;var oo2370=18385;var oo2369=17825;var oo2368=18010;var oo2367=17676;var oo2366=1;var oo2365=17403;var oo2364=17675;var oo2363=17824;var oo2362=18154;var oo2361=10;var oo2360=0;var oo2359=0;var oo2358=0;var oo2357=17275;var oo2356=17335;var oo2355=17046;var oo2354=16926;var oo2353=17045;var oo2352=17274;var oo2351=16712;var oo2350=16788;var oo2349=10;var oo2348=0;var oo2347=0;var oo2346=0;var oo2345=0;var oo2344=0;var oo2343=2;var oo2342=0;var oo2341=2;var oo2340=16531;var oo2339=0;var oo2338=1;var oo2337=1;var 
oo2336=0;var oo2335=0;var oo2334=1;var oo2333=0;var oo2332=1;var oo2331=0;var oo2330=1;var oo2329=0;var oo2328=16412;var oo2327=16530;var oo2326=16711;var oo2325=16925;var oo2324=17402;var oo2323=16281;var oo2322=16370;var oo2321=16260;var oo2320=16134;var oo2319=16259;var oo2318=16280;var oo2317=0;var oo2316=0;var oo2315=0;var oo2314=16042;var oo2313=16079;var oo2312=15913;var oo2311=15714;var oo2310=15912;var oo2309=16041;var oo2308=16133;var oo2307=1;var oo2306=15377;var oo2305=15569;var oo2304=15315;var oo2303=15265;var oo2302=15314;var oo2301=15376;var oo2300=15109;var oo2299=15253;var oo2298=14810;var oo2297=10;var oo2296=1;var oo2295=10;var oo2294=10;var oo2293=1;var oo2292=1;var oo2291=1;var oo2290=14744;var oo2289=14809;var oo2288=15108;var oo2287=15264;var oo2286=15713;var oo2285=16411;var oo2284=14589;var oo2283=14720;var oo2282=14516;var oo2281=0;var oo2280=1;var oo2279=0;var oo2278=1;var oo2277=0;var oo2276=0;var oo2275=0;var oo2274=14454;var oo2273=14515;var oo2272=14588;var oo2271=14322;var oo2270=14379;var oo2269=10;var oo2268=14289;var oo2267=10;var oo2266=1;var oo2265=10;var oo2264=10;var oo2263=1;var oo2262=1;var oo2261=14262;var oo2260=14288;var oo2259=14321;var oo2258=14453;var oo2257=0;var oo2256=13940;var oo2255=14126;var oo2254=13829;var oo2253=10;var oo2252=13764;var oo2251=13828;var oo2250=13939;var oo2249=10;var oo2248=0;var oo2247=0;var oo2246=0;var oo2245=0;var oo2244=0;var oo2243=2;var oo2242=0;var oo2241=2;var oo2240=13607;var oo2239=13713;var oo2238=13589;var oo2237=13535;var oo2236=13588;var oo2235=13606;var oo2234=13763;var oo2233=14261;var oo2232=13335;var oo2231=13505;var oo2230=13257;var oo2229=13158;var oo2228=13256;var oo2227=13334;var oo2226=10;var oo2225=1;var oo2224=10;var oo2223=10;var oo2222=1;var oo2221=1;var oo2220=13027;var oo2219=13141;var oo2218=12837;var oo2217=1;var oo2216=12714;var oo2215=12836;var oo2214=13026;var oo2213=13157;var oo2212=1;var oo2211=12584;var oo2210=12689;var oo2209=12453;var oo2208=10;var 
oo2207=0;var oo2206=0;var oo2205=0;var oo2204=0;var oo2203=0;var oo2202=2;var oo2201=0;var oo2200=2;var oo2199=12398;var oo2198=12452;var oo2197=12583;var oo2196=12232;var oo2195=12319;var oo2194=12122;var oo2193=10;var oo2192=1;var oo2191=10;var oo2190=10;var oo2189=1;var oo2188=1;var oo2187=12088;var oo2186=12121;var oo2185=12231;var oo2184=12397;var oo2183=12713;var oo2182=13534;var oo2181=14743;var oo2180=18469;var oo2179=10;var oo2178=11952;var oo2177=12049;var oo2176=10;var oo2175=11856;var oo2174=11950;var oo2173=11951;var oo2172=0;var oo2171=1;var oo2170=1;var oo2169=0;var oo2168=0;var oo2167=1;var oo2166=0;var oo2165=1;var oo2164=0;var oo2163=1;var oo2162=0;var oo2161=10;var oo2160=9;var oo2159=11793;var oo2158=11824;var oo2157=1;var oo2156=11771;var oo2155=0;var oo2154=11556;var oo2153=11770;var oo2152=11792;var oo2151=11855;var oo2150=0;var oo2149=1;var oo2148=0;var oo2147=1;var oo2146=0;var oo2145=0;var oo2144=0;var oo2143=11373;var oo2142=11499;var oo2141=10;var oo2140=11287;var oo2139=11171;var oo2138=11286;var oo2137=11372;var oo2136=11008;var oo2135=11165;var oo2134=10969;var oo2133=10931;var oo2132=10968;var oo2131=11007;var oo2130=11170;var oo2129=11555;var oo2128=0;var oo2127=1;var oo2126=0;var oo2125=1;var oo2124=0;var oo2123=0;var oo2122=0;var oo2121=9;var oo2120=10805;var oo2119=10926;var oo2118=10761;var oo2117=10702;var oo2116=10760;var oo2115=10804;var oo2114=9;var oo2113=10;var oo2112=10505;var oo2111=10622;var oo2110=10252;var oo2109=10125;var oo2108=10251;var oo2107=10504;var oo2106=10701;var oo2105=10;var oo2104=0;var oo2103=10021;var oo2102=10120;var oo2101=9984;var oo2100=9881;var oo2099=9983;var oo2098=10020;var oo2097=9765;var oo2096=9863;var oo2095=9684;var oo2094=9642;var oo2093=9683;var oo2092=9764;var oo2091=9880;var oo2090=10124;var oo2089=10930;var oo2088=0;var oo2087=0;var oo2086=0;var oo2085=0;var oo2084=9456;var oo2083=9595;var oo2082=9389;var oo2081=9353;var oo2080=9388;var oo2079=9455;var oo2078=9111;var oo2077=9300;var 
oo2076=8816;var oo2075=0;var oo2074=8754;var oo2073=8815;var oo2072=9110;var oo2071=9352;var oo2070=10;var oo2069=0;var oo2068=0;var oo2067=0;var oo2066=8601;var oo2065=8750;var oo2064=9;var oo2063=8341;var oo2062=8228;var oo2061=8340;var oo2060=8600;var oo2059=0;var oo2058=0;var oo2057=0;var oo2056=7839;var oo2055=8029;var oo2054=0;var oo2053=0;var oo2052=0;var oo2051=7702;var oo2050=10;var oo2049=0;var oo2048=1;var oo2047=1;var oo2046=0;var oo2045=0;var oo2044=1;var oo2043=0;var oo2042=1;var oo2041=0;var oo2040=1;var oo2039=0;var oo2038=7581;var oo2037=7701;var oo2036=7838;var oo2035=8227;var oo2034=8753;var oo2033=7434;var oo2032=7486;var oo2031=10;var oo2030=7318;var oo2029=7213;var oo2028=7317;var oo2027=7433;var oo2026=6954;var oo2025=7188;var oo2024=10;var oo2023=1;var oo2022=10;var oo2021=10;var oo2020=1;var oo2019=1;var oo2018=1;var oo2017=6936;var oo2016=10;var oo2015=1;var oo2014=10;var oo2013=10;var oo2012=1;var oo2011=1;var oo2010=6915;var oo2009=6935;var oo2008=6953;var oo2007=7212;var oo2006=6406;var oo2005=6524;var oo2004=0;var oo2003=1;var oo2002=1;var oo2001=0;var oo2000=0;var oo1999=1;var oo1998=0;var oo1997=1;var oo1996=0;var oo1995=1;var oo1994=0;var oo1993=6283;var oo1992=0;var oo1991=0;var oo1990=0;var oo1989=6203;var oo1988=6282;var oo1987=6405;var oo1986=0;var oo1985=6104;var oo1984=6129;var oo1983=5973;var oo1982=10;var oo1981=5933;var oo1980=5972;var oo1979=6103;var oo1978=6202;var oo1977=6914;var oo1976=7580;var oo1975=9641;var oo1974=10;var oo1973=5890;var oo1972=5910;var oo1971=5506;var oo1970=5745;var oo1969=5889;var oo1968=5450;var oo1967=5503;var oo1966=5352;var oo1965=5273;var oo1964=5351;var oo1963=5449;var oo1962=5505;var oo1961=0;var oo1960=1;var oo1959=0;var oo1958=1;var oo1957=0;var oo1956=0;var oo1955=0;var oo1954=0;var oo1953=0;var oo1952=0;var oo1951=5045;var oo1950=5191;var oo1949=0;var oo1948=1;var oo1947=1;var oo1946=0;var oo1945=0;var oo1944=1;var oo1943=0;var oo1942=1;var oo1941=0;var oo1940=1;var oo1939=0;var 
oo1938=4853;var oo1937=4754;var oo1936=4852;var oo1935=5044;var oo1934=0;var oo1933=4459;var oo1932=4728;var oo1931=0;var oo1930=0;var oo1929=0;var oo1928=4300;var oo1927=0;var oo1926=4206;var oo1925=4299;var oo1924=4458;var oo1923=4753;var oo1922=5272;var oo1921=4179;var oo1920=4201;var oo1919=4099;var oo1918=10;var oo1917=1;var oo1916=10;var oo1915=10;var oo1914=1;var oo1913=1;var oo1912=4024;var oo1911=4098;var oo1910=4178;var oo1909=10;var oo1908=3889;var oo1907=4019;var oo1906=10;var oo1905=0;var oo1904=0;var oo1903=0;var oo1902=0;var oo1901=0;var oo1900=2;var oo1899=0;var oo1898=2;var oo1897=3824;var oo1896=3506;var oo1895=3823;var oo1894=3888;var oo1893=4023;var oo1892=9;var oo1891=3265;var oo1890=3428;var oo1889=0;var oo1888=1;var oo1887=0;var oo1886=1;var oo1885=0;var oo1884=0;var oo1883=0;var oo1882=10;var oo1881=0;var oo1880=0;var oo1879=0;var oo1878=0;var oo1877=0;var oo1876=2;var oo1875=0;var oo1874=2;var oo1873=3109;var oo1872=10;var oo1871=2969;var oo1870=3108;var oo1869=3264;var oo1868=2824;var oo1867=2956;var oo1866=2796;var oo1865=2714;var oo1864=2795;var oo1863=2823;var oo1862=2968;var oo1861=3505;var oo1860=4205;var oo1859=2682;var oo1858=2700;var oo1857=2581;var oo1856=2467;var oo1855=2580;var oo1854=2681;var oo1853=10;var oo1852=2265;var oo1851=2362;var oo1850=2148;var oo1849=2129;var oo1848=2147;var oo1847=2264;var oo1846=2466;var oo1845=0;var oo1844=0;var oo1843=0;var oo1842=1988;var oo1841=2112;var oo1840=1880;var oo1839=0;var oo1838=1829;var oo1837=1879;var oo1836=1987;var oo1835=10;var oo1834=0;var oo1833=0;var oo1832=0;var oo1831=0;var oo1830=0;var oo1829=2;var oo1828=0;var oo1827=2;var oo1826=1769;var oo1825=1820;var oo1824=1661;var oo1823=0;var oo1822=10;var oo1821=1;var oo1820=10;var oo1819=10;var oo1818=1;var oo1817=1;var oo1816=1526;var oo1815=1660;var oo1814=1768;var oo1813=1828;var oo1812=2128;var oo1811=1476;var oo1810=1500;var oo1809=0;var oo1808=1;var oo1807=0;var oo1806=1;var oo1805=0;var oo1804=0;var oo1803=0;var 
oo1802=1339;var oo1801=1287;var oo1800=1338;var oo1799=1475;var oo1798=9;var oo1797=1127;var oo1796=1258;var oo1795=978;var oo1794=921;var oo1793=977;var oo1792=1126;var oo1791=1286;var oo1790=702;var oo1789=828;var oo1788=553;var oo1787=479;var oo1786=552;var oo1785=701;var oo1784=344;var oo1783=463;var oo1782=96;var oo1781=18;var oo1780=95;var oo1779=343;var oo1778=478;var oo1777=920;var oo1776=1525;var oo1775=2713;var oo1774=5932;var oo1773=12087;var oo1772=24754;var oo1771=49359;var oo1770=1;var oo1769=17;var oo1768=82;var oo1767=95;var oo1766=172;var oo1765=343;var oo1764=451;var oo1763=463;var oo1762=478;var oo1761=513;var oo1760=552;var oo1759=604;var oo1758=701;var oo1757=788;var oo1756=828;var oo1755=920;var oo1754=965;var oo1753=977;var oo1752=1101;var oo1751=1126;var oo1750=1194;var oo1749=1258;var oo1748=1286;var oo1747=1314;var oo1746=1338;var oo1745=1415;var oo1744=1475;var oo1743=1492;var oo1742=1500;var oo1741=1525;var oo1740=1644;var oo1739=1660;var oo1738=1729;var oo1737=1768;var oo1736=1800;var oo1735=1820;var oo1734=1828;var oo1733=1857;var oo1732=1879;var oo1731=1977;var oo1730=1987;var oo1729=1992;var oo1728=2112;var oo1727=2128;var oo1726=2144;var oo1725=2147;var oo1724=2157;var oo1723=2264;var oo1722=2267;var oo1721=2362;var oo1720=2466;var oo1719=2476;var oo1718=2580;var oo1717=2592;var oo1716=2681;var oo1715=2694;var oo1714=2700;var oo1713=2713;var oo1712=2731;var oo1711=2795;var oo1710=2798;var oo1709=2823;var oo1708=2887;var oo1707=2956;var oo1706=2968;var oo1705=2997;var oo1704=3108;var oo1703=3171;var oo1702=3264;var oo1701=3410;var oo1700=3428;var oo1699=3505;var oo1698=3788;var oo1697=3823;var oo1696=3831;var oo1695=3888;var oo1694=3904;var oo1693=4019;var oo1692=4023;var oo1691=4057;var oo1690=4098;var oo1689=4117;var oo1688=4178;var oo1687=4185;var oo1686=4201;var oo1685=4205;var oo1684=4208;var oo1683=4299;var oo1682=4349;var oo1681=4458;var oo1680=4492;var oo1679=4728;var oo1678=4753;var oo1677=4762;var oo1676=4852;var 
oo1675=4960;var oo1674=5044;var oo1673=5109;var oo1672=5191;var oo1671=5272;var oo1670=5292;var oo1669=5351;var oo1668=5352;var oo1667=5449;var oo1666=5474;var oo1665=5503;var oo1664=5505;var oo1663=5715;var oo1662=5745;var oo1661=5889;var oo1660=5901;var oo1659=5910;var oo1658=5932;var oo1657=5939;var oo1656=5972;var oo1655=5996;var oo1654=6103;var oo1653=6117;var oo1652=6129;var oo1651=6202;var oo1650=6248;var oo1649=6282;var oo1648=6310;var oo1647=6405;var oo1646=6432;var oo1645=6524;var oo1644=6914;var oo1643=6934;var oo1642=6935;var oo1641=6945;var oo1640=6953;var oo1639=7020;var oo1638=7188;var oo1637=7212;var oo1636=7237;var oo1635=7317;var oo1634=7369;var oo1633=7433;var oo1632=7481;var oo1631=7486;var oo1630=7580;var oo1629=7598;var oo1628=7701;var oo1627=7744;var oo1626=7838;var oo1625=7959;var oo1624=8029;var oo1623=8227;var oo1622=8248;var oo1621=8340;var oo1620=8500;var oo1619=8600;var oo1618=8746;var oo1617=8750;var oo1616=8753;var oo1615=8812;var oo1614=8815;var oo1613=8859;var oo1612=9110;var oo1611=9276;var oo1610=9300;var oo1609=9352;var oo1608=9359;var oo1607=9388;var oo1606=9453;var oo1605=9455;var oo1604=9561;var oo1603=9595;var oo1602=9641;var oo1601=9662;var oo1600=9683;var oo1599=9703;var oo1598=9764;var oo1597=9768;var oo1596=9863;var oo1595=9880;var oo1594=9924;var oo1593=9983;var oo1592=10015;var oo1591=10020;var oo1590=10089;var oo1589=10120;var oo1588=10124;var oo1587=10224;var oo1586=10251;var oo1585=10446;var oo1584=10504;var oo1583=10558;var oo1582=10622;var oo1581=10701;var oo1580=10738;var oo1579=10760;var oo1578=10790;var oo1577=10804;var oo1576=10925;var oo1575=10926;var oo1574=10930;var oo1573=10946;var oo1572=10968;var oo1571=11000;var oo1570=11007;var oo1569=11097;var oo1568=11165;var oo1567=11170;var oo1566=11191;var oo1565=11286;var oo1564=11290;var oo1563=11372;var oo1562=11428;var oo1561=11499;var oo1560=11555;var oo1559=11653;var oo1558=11770;var oo1557=11781;var oo1556=11792;var oo1555=11798;var oo1554=11824;var 
oo1553=11855;var oo1552=11949;var oo1551=11950;var oo1550=11951;var oo1549=12020;var oo1548=12049;var oo1547=12087;var oo1546=12110;var oo1545=12121;var oo1544=12171;var oo1543=12231;var oo1542=12237;var oo1541=12319;var oo1540=12397;var oo1539=12409;var oo1538=12452;var oo1537=12453;var oo1536=12583;var oo1535=12674;var oo1534=12689;var oo1533=12713;var oo1532=12824;var oo1531=12836;var oo1530=12914;var oo1529=13026;var oo1528=13080;var oo1527=13141;var oo1526=13157;var oo1525=13190;var oo1524=13256;var oo1523=13296;var oo1522=13334;var oo1521=13485;var oo1520=13505;var oo1519=13534;var oo1518=13574;var oo1517=13588;var oo1516=13592;var oo1515=13606;var oo1514=13633;var oo1513=13713;var oo1512=13763;var oo1511=13817;var oo1510=13828;var oo1509=13851;var oo1508=13939;var oo1507=14035;var oo1506=14126;var oo1505=14261;var oo1504=14285;var oo1503=14288;var oo1502=14293;var oo1501=14321;var oo1500=14353;var oo1499=14379;var oo1498=14453;var oo1497=14510;var oo1496=14515;var oo1495=14517;var oo1494=14588;var oo1493=14685;var oo1492=14720;var oo1491=14743;var oo1490=14785;var oo1489=14809;var oo1488=15063;var oo1487=15108;var oo1486=15183;var oo1485=15253;var oo1484=15264;var oo1483=15284;var oo1482=15314;var oo1481=15353;var oo1480=15376;var oo1479=15549;var oo1478=15569;var oo1477=15713;var oo1476=15881;var oo1475=15912;var oo1474=16015;var oo1473=16041;var oo1472=16053;var oo1471=16079;var oo1470=16133;var oo1469=16207;var oo1468=16259;var oo1467=16264;var oo1466=16280;var oo1465=16287;var oo1464=16370;var oo1463=16411;var oo1462=16464;var oo1461=16530;var oo1460=16701;var oo1459=16711;var oo1458=16751;var oo1457=16788;var oo1456=16925;var oo1455=16980;var oo1454=17045;var oo1453=17078;var oo1452=17274;var oo1451=17327;var oo1450=17335;var oo1449=17402;var oo1448=17441;var oo1447=17675;var oo1446=17681;var oo1445=17824;var oo1444=18001;var oo1443=18010;var oo1442=18154;var oo1441=18250;var oo1440=18310;var oo1439=18385;var oo1438=18442;var oo1437=18462;var 
oo1436=18469;var oo1435=18491;var oo1434=18508;var oo1433=18510;var oo1432=18714;var oo1431=18790;var oo1430=18832;var oo1429=18850;var oo1428=18913;var oo1427=18982;var oo1426=18991;var oo1425=19046;var oo1424=19093;var oo1423=19110;var oo1422=19214;var oo1421=19269;var oo1420=19293;var oo1419=19327;var oo1418=19345;var oo1417=19347;var oo1416=19349;var oo1415=19363;var oo1414=19372;var oo1413=19381;var oo1412=19393;var oo1411=19510;var oo1410=19659;var oo1409=19676;var oo1408=19820;var oo1407=19892;var oo1406=19896;var oo1405=19922;var oo1404=20083;var oo1403=20086;var oo1402=20093;var oo1401=20167;var oo1400=20325;var oo1399=20336;var oo1398=20362;var oo1397=20381;var oo1396=20489;var oo1395=20586;var oo1394=20807;var oo1393=20852;var oo1392=20931;var oo1391=20948;var oo1390=21081;var oo1389=21090;var oo1388=21144;var oo1387=21146;var oo1386=21158;var oo1385=21160;var oo1384=21210;var oo1383=21286;var oo1382=21293;var oo1381=21367;var oo1380=21386;var oo1379=21502;var oo1378=21543;var oo1377=21557;var oo1376=21588;var oo1375=21660;var oo1374=21684;var oo1373=21724;var oo1372=21786;var oo1371=21794;var oo1370=21816;var oo1369=21839;var oo1368=22052;var oo1367=22054;var oo1366=22218;var oo1365=22237;var oo1364=22288;var oo1363=22361;var oo1362=22381;var oo1361=22385;var oo1360=22439;var oo1359=22468;var oo1358=22503;var oo1357=22505;var oo1356=22553;var oo1355=22607;var oo1354=22715;var oo1353=22859;var oo1352=22875;var oo1351=22960;var oo1350=23000;var oo1349=23019;var oo1348=23097;var oo1347=23215;var oo1346=23260;var oo1345=23295;var oo1344=23352;var oo1343=23365;var oo1342=23615;var oo1341=23627;var oo1340=23732;var oo1339=23770;var oo1338=23895;var oo1337=23941;var oo1336=23997;var oo1335=24066;var oo1334=24173;var oo1333=24215;var oo1332=24227;var oo1331=24231;var oo1330=24413;var oo1329=24489;var oo1328=24657;var oo1327=24666;var oo1326=24754;var oo1325=24808;var oo1324=24952;var oo1323=24972;var oo1322=24974;var oo1321=25111;var oo1320=25115;var 
oo1319=25186;var oo1318=25269;var oo1317=25427;var oo1316=25492;var oo1315=25715;var oo1314=25724;var oo1313=25887;var oo1312=25890;var oo1311=25915;var oo1310=25960;var oo1309=25964;var oo1308=25987;var oo1307=26028;var oo1306=26108;var oo1305=26266;var oo1304=26326;var oo1303=26354;var oo1302=26375;var oo1301=26470;var oo1300=26520;var oo1299=26550;var oo1298=26645;var oo1297=26647;var oo1296=26714;var oo1295=26785;var oo1294=26815;var oo1293=26960;var oo1292=27026;var oo1291=27029;var oo1290=27229;var oo1289=27367;var oo1288=27369;var oo1287=27447;var oo1286=27458;var oo1285=27471;var oo1284=27508;var oo1283=27514;var oo1282=27580;var oo1281=27615;var oo1280=27645;var oo1279=27664;var oo1278=27691;var oo1277=27705;var oo1276=27755;var oo1275=27777;var oo1274=27802;var oo1273=27940;var oo1272=28030;var oo1271=28145;var oo1270=28273;var oo1269=28288;var oo1268=28319;var oo1267=28363;var oo1266=28404;var oo1265=28416;var oo1264=28419;var oo1263=28434;var oo1262=28489;var oo1261=28548;var oo1260=28602;var oo1259=28616;var oo1258=28669;var oo1257=28718;var oo1256=28741;var oo1255=28743;var oo1254=28796;var oo1253=28861;var oo1252=28902;var oo1251=29083;var oo1250=29108;var oo1249=29195;var oo1248=29293;var oo1247=29298;var oo1246=29424;var oo1245=29660;var oo1244=29678;var oo1243=29697;var oo1242=29779;var oo1241=29809;var oo1240=29815;var oo1239=29849;var oo1238=29878;var oo1237=29904;var oo1236=29915;var oo1235=29987;var oo1234=30014;var oo1233=30053;var oo1232=30056;var oo1231=30099;var oo1230=30108;var oo1229=30124;var oo1228=30187;var oo1227=30196;var oo1226=30203;var oo1225=30319;var oo1224=30478;var oo1223=30541;var oo1222=30554;var oo1221=30578;var oo1220=30621;var oo1219=30624;var oo1218=30625;var oo1217=30657;var oo1216=30693;var oo1215=30842;var oo1214=30906;var oo1213=30910;var oo1212=30969;var oo1211=31051;var oo1210=31067;var oo1209=31069;var oo1208=31163;var oo1207=31238;var oo1206=31572;var oo1205=31581;var oo1204=31617;var oo1203=31786;var 
oo1202=31848;var oo1201=31976;var oo1200=32025;var oo1199=32126;var oo1198=32236;var oo1197=32351;var oo1196=32367;var oo1195=32512;var oo1194=32536;var oo1193=32539;var oo1192=32566;var oo1191=32569;var oo1190=32635;var oo1189=32715;var oo1188=32865;var oo1187=32878;var oo1186=32916;var oo1185=32954;var oo1184=32994;var oo1183=33009;var oo1182=33038;var oo1181=33216;var oo1180=33222;var oo1179=33245;var oo1178=33310;var oo1177=33406;var oo1176=33442;var oo1175=33585;var oo1174=33666;var oo1173=33717;var oo1172=33781;var oo1171=33812;var oo1170=33840;var oo1169=33928;var oo1168=34000;var oo1167=34018;var oo1166=34026;var oo1165=34041;var oo1164=34049;var oo1163=34089;var oo1162=34170;var oo1161=34180;var oo1160=34238;var oo1159=34348;var oo1158=34416;var oo1157=34625;var oo1156=34701;var oo1155=34737;var oo1154=34739;var oo1153=34792;var oo1152=34851;var oo1151=34930;var oo1150=34944;var oo1149=34969;var oo1148=35275;var oo1147=35304;var oo1146=35343;var oo1145=35356;var oo1144=35429;var oo1143=35484;var oo1142=35577;var oo1141=35618;var oo1140=35654;var oo1139=35731;var oo1138=35837;var oo1137=36025;var oo1136=36026;var oo1135=36106;var oo1134=36227;var oo1133=36323;var oo1132=36368;var oo1131=36404;var oo1130=36411;var oo1129=36503;var oo1128=36547;var oo1127=36561;var oo1126=36568;var oo1125=36692;var oo1124=36713;var oo1123=36903;var oo1122=36951;var oo1121=37002;var oo1120=37013;var oo1119=37016;var oo1118=37186;var oo1117=37199;var oo1116=37211;var oo1115=37222;var oo1114=37288;var oo1113=37357;var oo1112=37673;var oo1111=37737;var oo1110=37744;var oo1109=37832;var oo1108=37888;var oo1107=37894;var oo1106=37906;var oo1105=38023;var oo1104=38032;var oo1103=38117;var oo1102=38224;var oo1101=38302;var oo1100=38423;var oo1099=38452;var oo1098=38491;var oo1097=38571;var oo1096=38580;var oo1095=38646;var oo1094=38675;var oo1093=38765;var oo1092=38780;var oo1091=38823;var oo1090=38865;var oo1089=39053;var oo1088=39082;var oo1087=39225;var oo1086=39226;var 
oo1085=39406;var oo1084=39439;var oo1083=39465;var oo1082=39485;var oo1081=39751;var oo1080=39784;var oo1079=39860;var oo1078=39897;var oo1077=39930;var oo1076=39987;var oo1075=39996;var oo1074=40000;var oo1073=40008;var oo1072=40036;var oo1071=40103;var oo1070=40140;var oo1069=40199;var oo1068=40217;var oo1067=40257;var oo1066=40277;var oo1065=40319;var oo1064=40332;var oo1063=40375;var oo1062=40399;var oo1061=40434;var oo1060=40457;var oo1059=40573;var oo1058=40625;var oo1057=40628;var oo1056=40720;var oo1055=40731;var oo1054=40735;var oo1053=40741;var oo1052=40756;var oo1051=40777;var oo1050=40793;var oo1049=40827;var oo1048=40830;var oo1047=40831;var oo1046=40845;var oo1045=40846;var oo1044=40891;var oo1043=41006;var oo1042=41052;var oo1041=41090;var oo1040=41096;var oo1039=41103;var oo1038=41125;var oo1037=41244;var oo1036=41276;var oo1035=41290;var oo1034=41303;var oo1033=41314;var oo1032=41325;var oo1031=41343;var oo1030=41352;var oo1029=41423;var oo1028=41497;var oo1027=41598;var oo1026=41646;var oo1025=41666;var oo1024=41721;var oo1023=42053;var oo1022=42071;var oo1021=42124;var oo1020=42125;var oo1019=42291;var oo1018=42321;var oo1017=42331;var oo1016=42344;var oo1015=42429;var oo1014=42433;var oo1013=42458;var oo1012=42621;var oo1011=42801;var oo1010=42870;var oo1009=42872;var oo1008=42883;var oo1007=42888;var oo1006=42913;var oo1005=42956;var oo1004=42991;var oo1003=43002;var oo1002=43015;var oo1001=43022;var oo1000=43028;var oo999=43154;var oo998=43178;var oo997=43233;var oo996=43238;var oo995=43241;var oo994=43286;var oo993=43570;var oo992=43637;var oo991=43668;var oo990=43752;var oo989=43833;var oo988=43838;var oo987=43847;var oo986=43953;var oo985=44064;var oo984=44119;var oo983=44203;var oo982=44215;var oo981=44222;var oo980=44238;var oo979=44248;var oo978=44459;var oo977=44492;var oo976=44498;var oo975=44596;var oo974=44601;var oo973=44649;var oo972=44652;var oo971=44774;var oo970=44799;var oo969=44838;var oo968=44863;var oo967=44925;var 
oo966=44997;var oo965=45053;var oo964=45071;var oo963=45099;var oo962=45161;var oo961=45227;var oo960=45243;var oo959=45256;var oo958=45402;var oo957=45464;var oo956=45493;var oo955=45525;var oo954=45668;var oo953=45692;var oo952=45701;var oo951=45765;var oo950=45769;var oo949=45823;var oo948=45887;var oo947=45936;var oo946=45958;var oo945=45969;var oo944=46230;var oo943=46358;var oo942=46362;var oo941=46393;var oo940=46431;var oo939=46464;var oo938=46482;var oo937=46513;var oo936=46535;var oo935=46546;var oo934=46655;var oo933=46696;var oo932=46720;var oo931=46748;var oo930=46773;var oo929=46826;var oo928=46881;var oo927=46987;var oo926=46989;var oo925=46995;var oo924=47005;var oo923=47044;var oo922=47123;var oo921=47175;var oo920=47197;var oo919=47257;var oo918=47450;var oo917=47467;var oo916=47473;var oo915=47528;var oo914=47541;var oo913=47566;var oo912=47616;var oo911=47625;var oo910=47706;var oo909=47718;var oo908=47761;var oo907=47792;var oo906=47861;var oo905=47904;var oo904=47906;var oo903=48012;var oo902=48113;var oo901=48185;var oo900=48373;var oo899=48378;var oo898=48471;var oo897=48564;var oo896=48581;var oo895=48583;var oo894=48709;var oo893=48796;var oo892=48863;var oo891=48910;var oo890=48961;var oo889=48965;var oo888=49058;var oo887=49109;var oo886=49185;var oo885=49221;var oo884=49359;var oo883=49364;var oo882=49380;var oo881=49386;var oo880=49570;var oo879=49575;var oo878=49592;var oo877=49609;var oo876=49672;var oo875=49831;var oo874=49865;var oo873=49904;var oo872=49979;var oo871=49988;var oo870=50019;var oo869=50086;var oo868=50136;var oo867=50248;var oo866=50253;var oo865=50286;var oo864=50339;var oo863=50342;var oo862=50359;var oo861=50397;var oo860=50603;var oo859=50615;var oo858=50626;var oo857=50640;var oo856=50663;var oo855=50671;var oo854=50738;var oo853=50786;var oo852=50811;var oo851=50856;var oo850=50927;var oo849=50933;var oo848=51011;var oo847=51022;var oo846=51053;var oo845=51073;var oo844=51122;var oo843=51219;var oo842=51329;var 
oo841=51407;var oo840=51410;var oo839=51420;var oo838=51511;var oo837=51541;var oo836=51620;var oo835=51705;var oo834=51826;var oo833=52057;var oo832=52164;var oo831=52387;var oo830=52482;var oo829=52508;var oo828=52684;var oo827=52731;var oo826=52751;var oo825=52778;var oo824=52823;var oo823=52880;var oo822=52886;var oo821=52926;var oo820=52940;var oo819=52984;var oo818=52987;var oo817=52994;var oo816=53000;var oo815=53010;var oo814=53205;var oo813=53210;var oo812=53309;var oo811=53320;var oo810=53368;var oo809=53502;var oo808=53519;var oo807=53520;var oo806=53574;var oo805=53632;var oo804=53877;var oo803=54097;var oo802=54234;var oo801=54239;var oo800=54271;var oo799=54324;var oo798=54331;var oo797=54348;var oo796=54441;var oo795=54448;var oo794=54529;var oo793=54568;var oo792=54628;var oo791=54703;var oo790=54718;var oo789=54901;var oo788=54964;var oo787=54996;var oo786=55007;var oo785=55019;var oo784=55069;var oo783=55151;var oo782=55210;var oo781=55281;var oo780=55376;var oo779=55402;var oo778=55502;var oo777=55553;var oo776=55644;var oo775=55672;var oo774=55698;var oo773=55735;var oo772=55826;var oo771=55837;var oo770=55845;var oo769=55886;var oo768=55961;var oo767=56001;var oo766=56022;var oo765=56180;var oo764=56328;var oo763=56335;var oo762=56572;var oo761=56584;var oo760=56594;var oo759=56712;var oo758=56812;var oo757=57064;var oo756=57126;var oo755=57182;var oo754=57190;var oo753=57221;var oo752=57258;var oo751=57335;var oo750=57338;var oo749=57396;var oo748=57397;var oo747=57413;var oo746=57420;var oo745=57422;var oo744=57446;var oo743=57485;var oo742=57492;var oo741=57563;var oo740=57661;var oo739=57870;var oo738=58001;var oo737=58046;var oo736=58168;var oo735=58326;var oo734=58382;var oo733=58383;var oo732=58386;var oo731=58404;var oo730=58443;var oo729=58448;var oo728=58470;var oo727=58539;var oo726=58575;var oo725=58582;var oo724=58619;var oo723=58620;var oo722=58731;var oo721=58811;var oo720=58815;var oo719=58850;var oo718=58904;var oo717=58907;var 
oo716=58949;var oo715=59324;var oo714=59347;var oo713=59460;var oo712=59533;var oo711=59553;var oo710=59665;var oo709=59669;var oo708=59696;var oo707=59723;var oo706=59812;var oo705=59823;var oo704=59858;var oo703=59891;var oo702=59933;var oo701=60014;var oo700=60093;var oo699=60177;var oo698=60267;var oo697=60270;var oo696=60278;var oo695=60400;var oo694=60504;var oo693=60508;var oo692=60598;var oo691=60609;var oo690=60664;var oo689=60667;var oo688=60701;var oo687=60721;var oo686=60863;var oo685=60886;var oo684=60902;var oo683=60919;var oo682=61090;var oo681=61142;var oo680=61159;var oo679=61194;var oo678=61221;var oo677=61273;var oo676=61295;var oo675=61386;var oo674=61465;var oo673=61472;var oo672=61537;var oo671=61567;var oo670=61584;var oo669=61626;var oo668=61633;var oo667=61672;var oo666=61751;var oo665=61949;var oo664=62149;var oo663=62213;var oo662=62250;var oo661=62253;var oo660=62360;var oo659=62365;var oo658=62423;var oo657=62466;var oo656=62574;var oo655=62653;var oo654=62678;var oo653=62749;var oo652=62836;var oo651=62858;var oo650=62922;var oo649=62989;var oo648=63158;var oo647=63168;var oo646=63221;var oo645=63317;var oo644=63365;var oo643=63390;var oo642=63488;var oo641=63503;var oo640=63605;var oo639=63674;var oo638=63703;var oo637=63811;var oo636=63907;var oo635=63915;var oo634=64004;var oo633=64093;var oo632=64179;var oo631=64224;var oo630=64339;var oo629=64342;var oo628=64384;var oo627=64391;var oo626=64403;var oo625=64560;var oo624=64570;var oo623=64615;var oo622=64787;var oo621=64837;var oo620=64845;var oo619=65035;var oo618=65146;var oo617=65170;var oo616=65205;var oo615=65568;var oo614=65652;var oo613=65663;var oo612=65843;var oo611=65913;var oo610=65966;var oo609=66001;var oo608=66005;var oo607=66070;var oo606=66102;var oo605=66136;var oo604=66285;var oo603=66498;var oo602=66522;var oo601=66555;var oo600=66591;var oo599=66711;var oo598=66735;var oo597=66775;var oo596=66789;var oo595=66829;var oo594=66902;var oo593=66923;var oo592=67028;var 
oo591=67053;var oo590=67057;var oo589=67087;var oo588=67122;var oo587=67240;var oo586=67423;var oo585=67488;var oo584=67559;var oo583=67604;var oo582=67609;var oo581=67635;var oo580=67732;var oo579=67763;var oo578=67826;var oo577=67828;var oo576=67834;var oo575=67917;var oo574=68050;var oo573=68095;var oo572=68107;var oo571=68252;var oo570=68302;var oo569=68307;var oo568=68311;var oo567=68622;var oo566=68626;var oo565=68712;var oo564=68738;var oo563=68810;var oo562=68818;var oo561=68839;var oo560=68901;var oo559=68909;var oo558=68940;var oo557=68986;var oo556=69043;var oo555=69076;var oo554=69124;var oo553=69138;var oo552=69201;var oo551=69332;var oo550=69344;var oo549=69395;var oo548=69437;var oo547=69439;var oo546=69555;var oo545=69610;var oo544=69615;var oo543=69738;var oo542=69796;var oo541=69897;var oo540=70007;var oo539=70092;var oo538=70111;var oo537=70202;var oo536=70256;var oo535=70287;var oo534=70297;var oo533=70302;var oo532=70409;var oo531=70484;var oo530=70581;var oo529=70620;var oo528=70624;var oo527=70675;var oo526=70729;var oo525=70953;var oo524=70959;var oo523=71005;var oo522=71057;var oo521=71104;var oo520=71139;var oo519=71145;var oo518=71200;var oo517=71255;var oo516=71308;var oo515=71340;var oo514=71383;var oo513=71438;var oo512=71439;var oo511=71478;var oo510=71593;var oo509=71617;var oo508=71652;var oo507=71661;var oo506=71711;var oo505=71766;var oo504=71781;var oo503=71787;var oo502=71825;var oo501=71865;var oo500=72036;var oo499=72056;var oo498=72179;var oo497=72186;var oo496=72245;var oo495=72263;var oo494=72389;var oo493=72398;var oo492=72429;var oo491=72456;var oo490=72613;var oo489=72618;var oo488=72753;var oo487=72772;var oo486=72804;var oo485=72863;var oo484=72988;var oo483=73043;var oo482=73059;var oo481=73202;var oo480=73215;var oo479=73416;var oo478=73421;var oo477=73591;var oo476=73670;var oo475=73674;var oo474=73693;var oo473=73695;var oo472=73711;var oo471=73750;var oo470=73787;var oo469=73822;var oo468=73914;var oo467=74035;var 
oo466=74047;var oo465=74066;var oo464=74084;var oo463=74131;var oo462=74158;var oo461=74183;var oo460=74214;var oo459=74411;var oo458=74438;var oo457=74443;var oo456=74543;var oo455=74663;var oo454=74753;var oo453=74780;var oo452=74905;var oo451=74917;var oo450=74962;var oo449=74993;var oo448=75055;var oo447=75112;var oo446=75166;var oo445=75170;var oo444=75238;var oo443=75286;var oo442=75338;var oo441=75409;var oo440=75425;var oo439=75430;var oo438=75436;var oo437=75453;var oo436=75672;var oo435=75712;var oo434=75773;var oo433=75836;var oo432=75935;var oo431=75964;var oo430=75979;var oo429=75992;var oo428=76039;var oo427=76145;var oo426=76224;var oo425=76385;var oo424=76468;var oo423=76518;var oo422=76608;var oo421=76624;var oo420=76648;var oo419=76660;var oo418=76669;var oo417=76730;var oo416=76815;var oo415=76835;var oo414=76874;var oo413=76923;var oo412=76985;var oo411=76996;var oo410=77074;var oo409=77207;var oo408=77229;var oo407=77271;var oo406=77313;var oo405=77333;var oo404=77499;var oo403=77510;var oo402=77649;var oo401=77651;var oo400=77661;var oo399=77711;var oo398=77828;var oo397=77866;var oo396=77890;var oo395=77916;var oo394=77972;var oo393=78005;var oo392=78038;var oo391=78041;var oo390=78153;var oo389=78165;var oo388=78419;var oo387=78429;var oo386=78448;var oo385=78598;var oo384=78630;var oo383=78655;var oo382=78686;var oo381=78763;var oo380=78764;var oo379=78865;var oo378=78887;var oo377=78938;var oo376=78996;var oo375=79005;var oo374=79056;var oo373=79067;var oo372=79124;var oo371=79137;var oo370=79209;var oo369=79457;var oo368=79494;var oo367=79509;var oo366=79601;var oo365=79635;var oo364=79653;var oo363=79675;var oo362=79763;var oo361=79904;var oo360=79989;var oo359=80036;var oo358=80153;var oo357=80217;var oo356=80220;var oo355=80266;var oo354=80323;var oo353=80469;var oo352=80555;var oo351=80609;var oo350=80676;var oo349=80736;var oo348=80820;var oo347=80829;var oo346=80831;var oo345=80835;var oo344=80999;var oo343=81045;var oo342=81076;var 
oo341=81177;var oo340=81207;var oo339=81212;var oo338=81223;var oo337=81239;var oo336=81331;var oo335=81347;var oo334=81364;var oo333=81389;var oo332=81394;var oo331=81432;var oo330=81462;var oo329=81533;var oo328=81640;var oo327=81764;var oo326=82010;var oo325=82018;var oo324=82047;var oo323=82147;var oo322=82283;var oo321=82333;var oo320=82405;var oo319=82409;var oo318=82480;var oo317=82493;var oo316=82680;var oo315=82783;var oo314=82848;var oo313=82871;var oo312=82945;var oo311=82950;var oo310=82991;var oo309=83096;var oo308=83311;var oo307=83428;var oo306=83438;var oo305=83457;var oo304=83522;var oo303=83608;var oo302=83718;var oo301=83752;var oo300=83833;var oo299=83837;var oo298=83861;var oo297=83904;var oo296=83912;var oo295=83923;var oo294=83964;var oo293=84008;var oo292=84019;var oo291=84035;var oo290=84080;var oo289=84082;var oo288=84099;var oo287=84226;var oo286=84250;var oo285=84365;var oo284=84396;var oo283=84563;var oo282=84586;var oo281=84726;var oo280=84741;var oo279=84767;var oo278=84826;var oo277=84829;var oo276=84860;var oo275=84917;var oo274=84949;var oo273=85012;var oo272=85032;var oo271=85110;var oo270=85182;var oo269=85278;var oo268=85296;var oo267=85337;var oo266=85369;var oo265=85410;var oo264=85430;var oo263=85495;var oo262=85496;var oo261=85598;var oo260=85610;var oo259=85619;var oo258=85697;var oo257=85708;var oo256=85879;var oo255=86011;var oo254=86018;var oo253=86028;var oo252=86058;var oo251=86140;var oo250=86210;var oo249=86410;var oo248=86462;var oo247=86614;var oo246=86670;var oo245=87050;var oo244=87090;var oo243=87117;var oo242=87158;var oo241=87161;var oo240=87287;var oo239=87322;var oo238=87326;var oo237=87365;var oo236=87391;var oo235=87456;var oo234=87472;var oo233=87490;var oo232=87491;var oo231=87519;var oo230=87551;var oo229=87630;var oo228=87727;var oo227=87779;var oo226=87788;var oo225=87892;var oo224=88069;var oo223=88100;var oo222=88184;var oo221=88284;var oo220=88285;var oo219=88290;var oo218=88304;var oo217=88393;var 
oo216=88416;var oo215=88461;var oo214=88520;var oo213=88542;var oo212=88559;var oo211=88695;var oo210=88718;var oo209=88750;var oo208=88816;var oo207=88820;var oo206=88832;var oo205=88838;var oo204=88893;var oo203=88948;var oo202=88966;var oo201=89033;var oo200=89043;var oo199=89099;var oo198=89136;var oo197=89150;var oo196=89203;var oo195=89442;var oo194=89447;var oo193=89592;var oo192=89610;var oo191=89659;var oo190=89680;var oo189=89716;var oo188=89801;var oo187=89852;var oo186=89925;var oo185=90133;var oo184=90188;var oo183=90204;var oo182=90240;var oo181=90325;var oo180=90439;var oo179=90471;var oo178=90479;var oo177=90594;var oo176=90647;var oo175=90670;var oo174=90732;var oo173=90910;var oo172=90983;var oo171=91061;var oo170=91081;var oo169=91116;var oo168=91243;var oo167=91361;var oo166=91368;var oo165=91385;var oo164=91484;var oo163=91539;var oo162=91543;var oo161=91558;var oo160=91584;var oo159=91592;var oo158=91635;var oo157=91672;var oo156=91698;var oo155=91706;var oo154=91721;var oo153=91758;var oo152=91907;var oo151=91948;var oo150=91993;var oo149=92070;var oo148=92141;var oo147=92142;var oo146=92234;var oo145=92253;var oo144=92283;var oo143=92310;var oo142=92367;var oo141=92428;var oo140=92462;var oo139=92472;var oo138=92484;var oo137=92486;var oo136=92615;var oo135=92665;var oo134=92737;var oo133=92857;var oo132=93001;var oo131=93006;var oo130=93007;var oo129=93063;var oo128=93128;var oo127=93185;var oo126=93217;var oo125=93258;var oo124=93268;var oo123=93271;var oo122=93302;var oo121=93319;var oo120=93417;var oo119=93509;var oo118=93528;var oo117=93694;var oo116=93714;var oo115=93757;var oo114=93769;var oo113=93780;var oo112=93829;var oo111=94078;var oo110=94082;var oo109=94108;var oo108=94182;var oo107=94197;var oo106=94242;var oo105=94287;var oo104=94292;var oo103=94341;var oo102=94364;var oo101=94449;var oo100=94528;var oo99=94591;var oo98=94617;var oo97=94650;var oo96=94764;var oo95=94841;var oo94=94877;var oo93=94878;var oo92=94918;var 
oo91=94964;var oo90=94999;var oo89=95018;var oo88=95101;var oo87=95121;var oo86=95239;var oo85=95268;var oo84=95293;var oo83=95326;var oo82=95374;var oo81=95390;var oo80=95523;var oo79=95576;var oo78=95661;var oo77=95729;var oo76=95737;var oo75=95806;var oo74=95833;var oo73=95934;var oo72=95936;var oo71=95955;var oo70=95974;var oo69=96097;var oo68=96108;var oo67=96120;var oo66=96121;var oo65=96137;var oo64=96327;var oo63=96339;var oo62=96357;var oo61=96423;var oo60=96479;var oo59=96542;var oo58=96556;var oo57=96581;var oo56=96602;var oo55=96730;var oo54=96778;var oo53=96856;var oo52=96868;var oo51=96871;var oo50=96905;var oo49=96914;var oo48=97012;var oo47=97105;var oo46=97196;var oo45=97205;var oo44=97228;var oo43=97256;var oo42=97348;var oo41=97472;var oo40=97495;var oo39=97605;var oo38=97796;var oo37=97820;var oo36=97895;var oo35=97927;var oo34=98065;var oo33=98105;var oo32=98108;var oo31=98145;var oo30=98259;var oo29=98291;var oo28=98319;var oo27=98487;var oo26=98549;var oo25=98557;var oo24=98577;var oo23=98660;var oo22=98704;var oo21=98823;var oo20=98844;var oo19=98963;var oo18=98986;var oo17=99041;var oo16=99240;var oo15=99255;var oo14=99268;var oo13=99518;var oo12=99532;var oo11=99534;var oo10=99543;var oo9=99573;var oo8=99595;var oo7=99623;var oo6=99741;var oo5=99811;var oo4=99819;var oo3=99900;var oo2=99910;var oo1=99941;var oo0=99960;cbb_jsvmp=function(all,duei,start,shuz,argsList,ogg,op){function getproto(s,d,e){let dt=s;for(;1==1;){if(s.hasOwnProperty(d)){try{s[d]=e;}catch(e2){this[d]=e;return;}break;}else{s=s.__proto__;if(s==undefined||s==null){window[d]=e;return;}else{a9=9;}}}}if(op!==undefined){var allthis;allthis=op['allthis'];duei=op.duei;all=op.all;shuz=op.shuz;argsList=op.argsList;var a1,a2,a3,a4,a5,a6,a7,a8,a9,j,j2,i;a7=op.a7;var args=op.args;var cbbb=op.cbbb;}else{var allthis;ogg!==undefined?allthis=ogg:allthis=all;var a1,a2,a3,a4,a5,a6,a7,a8,a9,j,j2,i;var args=[];var cbbb=all;}var 
cbb0000006567=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004130=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008445=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007909=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002696=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002864=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003156=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003255=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003371=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003407=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003196=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007726=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005336=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007843=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000333=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000130=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003864=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004003=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001021=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005738=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002989=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004975=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007618=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006251=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005334=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001762=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007274=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001744=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000371=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000599=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000019=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005065=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005714=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006428=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001013=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008069=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000769=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008402=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006443=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001712=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008431=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006591=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007578=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003679=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001717=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006879=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000006733=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002280=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008325=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003180=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001317=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003881=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004636=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000305=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007646=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000684=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001527=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000581=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007371=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006024=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005701=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006722=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003078=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006041=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005699=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006468=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003844=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001676=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001835=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000004396=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006666=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002494=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004240=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004851=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003595=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000254=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005836=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001893=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002001=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004696=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001290=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005001=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004852=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002437=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007834=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006788=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002817=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004383=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005055=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005409=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007527=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001505=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004108=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000092=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006952=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004941=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004537=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003111=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004966=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002193=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001394=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005758=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008293=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb000000529=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004939=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003591=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003912=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005041=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007986=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008062=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005221=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000885=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001874=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000790=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005917=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000005997=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006222=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005003=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008025=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001594=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001884=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008439=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007129=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007363=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003409=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002380=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001341=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000321=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007920=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005983=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000976=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000606=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001307=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001774=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007871=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000578=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004373=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003977=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000546=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006187=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003990=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004567=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004593=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001566=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003207=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006552=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000245=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006559=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007789=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006736=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007389=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004680=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002541=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002462=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006775=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000754=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003231=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003270=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001052=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006856=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002996=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000571=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007323=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001913=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008274=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000002620=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007797=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000330=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005706=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002302=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003429=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007891=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004486=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006848=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005533=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001814=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000251=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005074=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002923=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001216=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000804=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003125=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005327=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003552=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005481=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000489=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008068=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001326=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001368=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005165=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005831=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003273=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005322=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003352=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000284=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002026=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000338=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004516=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008209=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000520=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001592=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008411=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004363=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005689=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007184=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000163=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004999=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001980=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002117=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004002=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002268=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004429=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004998=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008302=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000004601=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006188=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002064=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003745=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003506=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004685=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004279=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007140=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002072=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002044=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003899=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007583=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008001=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004916=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001191=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004726=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004294=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004985=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003637=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002995=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000649=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004692=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000512=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007820=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003973=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005219=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002303=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002389=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006484=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005126=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007470=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001441=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001934=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003951=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005963=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002569=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003513=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001134=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007776=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000001075=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004457=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003326=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000719=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002066=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003208=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007314=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003171=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003865=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000311=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007609=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003048=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003183=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002910=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002366=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002830=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000676=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000808=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002206=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003158=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007710=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005811=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003182=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000007877=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006967=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005774=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002767=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001383=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007700=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001578=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001990=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005458=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000475=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003890=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008233=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004718=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005864=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005574=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005242=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008105=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006042=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007034=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007491=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007164=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007740=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007044=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005205=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001004=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005452=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000044=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004812=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003768=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006764=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003472=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001142=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000742=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003656=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002707=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000225=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002239=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003410=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004025=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002487=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002509=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000015=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007904=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002784=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002967=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008094=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000008245=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004056=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007839=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000548=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000768=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003687=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007921=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004336=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003911=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000714=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006564=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000039=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006669=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002282=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007313=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004913=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006229=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000785=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001579=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007377=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003619=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002574=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000972=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006838=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006858=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003420=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002463=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001318=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007501=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001250=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002870=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007508=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005660=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007521=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004969=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003144=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005102=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001823=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000237=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002164=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004337=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003689=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001244=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005712=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007442=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005994=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000003846=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001197=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001923=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000812=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003399=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002418=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000391=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001771=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001455=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006102=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006813=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004490=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001646=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003127=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002467=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003567=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003825=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005311=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006723=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004497=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002318=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001123=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002556=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006223=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001363=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000637=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004244=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000625=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004937=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008231=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001792=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006637=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005397=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002664=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000887=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004446=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007893=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006086=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003085=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005373=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006712=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005026=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007027=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001154=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003456=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004216=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005523=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007324=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004078=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002893=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004890=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001606=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002337=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001645=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006016=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002018=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001961=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006048=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001369=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006538=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002255=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004739=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006234=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006240=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002667=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004376=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000586=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004340=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000008444=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000832=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001621=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006964=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002577=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007361=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000447=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005260=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003507=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007868=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007568=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001708=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005362=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008201=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005430=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007731=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003282=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005605=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001516=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008037=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002554=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005751=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004463=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001601=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005179=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007937=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002023=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004774=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004905=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001101=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001137=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006011=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005565=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001036=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002256=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002650=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002481=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006969=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003133=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005887=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005410=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003839=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001977=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006946=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001040=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003648=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005710=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005297=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002738=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001587=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005196=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000500=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004127=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003011=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006219=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004584=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005856=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004375=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007751=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003681=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003936=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002506=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006749=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001806=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006182=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000908=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007811=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007863=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007964=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005402=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005807=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003057=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006790=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005382=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000147=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008384=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002812=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003955=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005558=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003067=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008362=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004977=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002847=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004775=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000615=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006282=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004194=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003106=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005016=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001898=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004092=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002641=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000557=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003999=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003803=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007966=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004394=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000946=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002396=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008099=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003789=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000133=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001033=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004181=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003810=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007728=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004973=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006285=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004138=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005129=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006777=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002714=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000577=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006072=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005729=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002382=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007257=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008353=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007784=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005854=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003884=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002529=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006891=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000153=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007663=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007944=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003199=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005505=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006836=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001791=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003966=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002536=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005155=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004964=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006583=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001421=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000605=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000787=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001871=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000846=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006756=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000115=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006091=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008063=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006326=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003791=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008347=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008227=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006274=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007598=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003960=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002087=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006293=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005404=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006026=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005773=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001832=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001715=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004176=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003742=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002898=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000004591=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000941=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003753=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008332=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003531=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001581=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005063=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002124=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003002=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002238=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004940=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006677=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006425=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002596=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003377=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000110=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002472=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003512=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007282=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005199=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002047=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004316=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004643=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000005303=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005631=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004857=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003252=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000516=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008278=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000001998=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003979=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003696=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006629=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003032=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005350=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000894=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007620=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001966=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005504=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002433=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003157=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006252=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004093=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006760=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004001=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005431=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002892=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006204=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005223=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006454=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000174=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005435=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008338=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002027=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007995=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006033=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000441=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002869=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003553=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002289=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007104=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002181=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003143=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007599=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004997=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000506=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006833=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003539=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001237=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002343=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003614=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003317=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005331=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000861=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005907=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004096=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004751=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000060=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007067=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001103=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001585=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007942=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004322=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006100=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006497=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001477=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001438=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002902=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004088=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005384=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001107=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006452=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000006581=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003278=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003908=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000409=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002798=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007024=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004659=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001964=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002960=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001072=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003861=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001195=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000327=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006239=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000964=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005920=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002415=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002523=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006648=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005646=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006960=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000498=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007315=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007779=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002228=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001659=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001096=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005475=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005225=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001279=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000079=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004102=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002780=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006228=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002627=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007517=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005727=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001750=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003984=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004780=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002112=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007713=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000686=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006193=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001411=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002896=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000004603=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007631=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000051=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000809=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001508=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001160=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005921=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006725=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000438=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000184=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000484=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002250=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007879=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001775=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000442=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004886=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008183=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005923=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004267=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001282=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000093=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002547=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005838=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000008113=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004976=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001305=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006359=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001840=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005258=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003937=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007295=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000915=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006191=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008226=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002460=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005231=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003568=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004443=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000121=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007733=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007170=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000353=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000825=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006149=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007472=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007374=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000007458=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007927=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008285=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001090=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002339=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001213=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006039=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003387=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002478=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005944=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002531=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007488=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001942=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001299=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007338=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006547=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003703=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001353=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001551=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000171=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004292=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006046=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005080=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000393=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007002=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000117=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006356=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004239=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001707=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003422=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002466=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002983=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000647=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002045=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003313=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002639=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003438=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002126=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007564=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008061=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001350=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004529=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002823=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001194=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002053=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001975=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000001141=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006417=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001798=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000128=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006670=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002628=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005961=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007187=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003878=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007166=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005735=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004360=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003289=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008177=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005863=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006792=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003412=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000852=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002442=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003367=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008122=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002436=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005609=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006841=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006929=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003779=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007912=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000528=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001485=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003093=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008380=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006267=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007795=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008398=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003809=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006623=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003524=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004111=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001541=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003219=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007186=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000838=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001400=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008427=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000752=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006395=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000181=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007948=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004693=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006941=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001267=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006668=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006635=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001177=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005681=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004098=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001660=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002758=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007160=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005930=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003411=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005119=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003475=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004614=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001261=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003805=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004082=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004250=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002781=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002414=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002959=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002036=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008364=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003725=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002235=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007330=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007512=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002794=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001373=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007215=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004596=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003029=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002854=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007641=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006720=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008352=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004958=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007402=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006047=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007343=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000705=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006200=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007604=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003851=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003949=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006810=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002829=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007565=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006799=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003110=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001409=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002180=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007523=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008172=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000900=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002411=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006158=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007688=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005338=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002242=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007993=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000288=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007857=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007721=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004505=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006603=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005879=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001634=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006323=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001029=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007252=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006110=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001819=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007767=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007803=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001089=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007500=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006930=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001442=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003224=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002849=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004282=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005246=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007070=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002998=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002981=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004888=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006787=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000003821=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004165=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001189=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005926=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001494=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005253=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001636=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002901=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001150=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006358=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003159=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001248=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004379=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006550=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005808=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004645=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008146=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004346=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006829=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001109=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007192=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008246=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003298=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004532=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003527=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006306=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007556=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003798=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001200=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006396=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008161=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003819=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001088=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004931=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003307=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004638=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001385=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004995=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005509=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007042=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000882=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004782=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002597=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001642=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002098=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006724=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000004710=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002375=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002063=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003926=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000806=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000671=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005096=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004289=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002799=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002586=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007869=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004624=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004061=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004425=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004278=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000186=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005524=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001042=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006311=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004829=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007463=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007053=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008028=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002030=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007110=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004580=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005647=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005644=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003416=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007703=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004402=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004545=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001758=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005461=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007650=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007306=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004906=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004308=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008048=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008369=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008083=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001843=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001198=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005947=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007690=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002485=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005376=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008053=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004801=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006457=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006970=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000322=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003417=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003280=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001604=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000385=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008045=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002708=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008034=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006864=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002962=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006630=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007702=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001656=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005058=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008361=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004712=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003948=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004514=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005288=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008189=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002880=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004255=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004919=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005846=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005520=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002139=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000638=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001794=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000403=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002658=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002836=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000406=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006139=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000483=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004386=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000448=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000220=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008312=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008287=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001157=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003403=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003301=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001053=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005614=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000350=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005388=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005883=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000347=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000396=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006645=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000592=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003620=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005834=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006920=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002508=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007379=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004777=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007947=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000459=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004327=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006101=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000628=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006414=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006730=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005158=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007925=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000199=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000344=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007862=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004023=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006982=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001880=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003547=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000205=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004137=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001376=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002233=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005898=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005319=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001009=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007251=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002301=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006840=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005848=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008096=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000636=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004837=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000003573=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003066=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000975=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008085=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000072=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001070=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006586=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004262=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005405=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004813=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001797=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007522=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005844=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004673=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005592=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001204=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005151=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007855=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005942=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005486=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003148=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005588=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001113=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002957=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006651=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007168=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007689=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000621=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008268=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002252=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007451=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006853=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003174=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006721=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004688=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000543=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003744=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003446=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001845=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003858=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000383=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006476=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001521=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002521=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000886=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007717=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001675=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001453=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005007=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001001=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007611=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006123=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000379=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008239=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004592=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000780=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003549=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001298=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006146=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000660=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008033=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007763=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004361=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007785=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007380=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002862=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004493=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006914=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002022=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007557=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006151=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004393=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002278=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000949=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000266=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005436=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004618=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002308=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007087=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003904=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000888=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007412=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002080=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003492=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005792=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001106=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000613=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002202=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007460=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007051=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000190=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006250=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001514=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007518=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004554=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004951=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002135=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002808=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008277=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005210=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007173=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006587=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000561=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002059=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003341=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005127=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006557=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007069=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000390=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003515=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003005=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005310=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005289=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003297=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000839=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000007637=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000566=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003782=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006081=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000328=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005817=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004242=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000799=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006453=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005562=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004157=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005739=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001632=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008176=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002291=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004286=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006811=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008097=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000573=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006821=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000694=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007202=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001049=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001143=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003827=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000874=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007822=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007749=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006426=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002797=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001787=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001330=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006608=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005012=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007263=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003292=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003433=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006808=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001607=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000711=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002115=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003972=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003799=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008015=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003261=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000395=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000580=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001568=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006781=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004417=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002986=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007113=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003447=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007490=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003855=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002190=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005934=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007147=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003329=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006175=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003743=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000802=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003463=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006912=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006201=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005541=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007340=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001102=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008378=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000969=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002899=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006352=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000913=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007572=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004203=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001264=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000947=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006915=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007660=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006157=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007821=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005238=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004732=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004779=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002152=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005154=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002079=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000048=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004120=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007729=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001597=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002750=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005692=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006415=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004225=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008022=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006744=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001401=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007617=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005641=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001147=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002207=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003227=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008328=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000001641=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002852=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005131=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002200=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006714=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007461=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002272=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005666=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004453=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001548=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001860=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003123=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002383=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002314=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002966=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002031=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002007=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004122=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002113=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000912=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000062=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003765=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003688=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002473=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006373=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000432=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006517=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001649=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003281=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001776=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006652=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004118=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001437=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007505=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000008175=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004089=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002894=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007875=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000026=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000100=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006759=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007981=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001288=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000365=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007077=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003418=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005060=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002283=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000850=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006105=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002360=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006257=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000692=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007994=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002471=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004128=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001416=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001098=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006990=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005401=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007386=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004391=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003071=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006684=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001447=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007765=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007715=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000281=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003494=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008029=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004730=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001582=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001967=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007647=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005678=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005169=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007445=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001848=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005347=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003750=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000004724=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005576=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002295=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002584=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006360=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007582=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005622=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000673=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006890=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006197=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006355=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003794=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002133=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003009=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000782=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006940=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007032=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007075=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005315=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000467=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002300=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003717=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003501=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000003535=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006438=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004565=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001067=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005813=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008160=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003545=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005146=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007870=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003322=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007892=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006403=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001023=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000504=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004271=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006371=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002609=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003575=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007858=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006779=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002926=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004533=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004254=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000312=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006486=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003452=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008434=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003334=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000940=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000731=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000871=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002809=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002551=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000896=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006875=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005632=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002633=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006951=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001811=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002346=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000751=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001489=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001283=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008051=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000073=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006624=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000004119=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004839=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006108=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006099=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002220=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002746=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003379=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008030=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000644=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001155=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004679=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002550=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004669=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008424=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002644=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007066=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000669=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000840=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004551=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005587=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007264=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003327=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003708=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002943=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007574=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005667=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007580=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008129=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006119=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003190=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001522=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007916=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002645=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001031=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006805=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001273=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002189=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003473=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005145=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002603=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004190=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002192=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002166=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007177=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007624=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008044=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000492=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000553=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004026=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004413=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001539=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005757=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000166=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006888=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003525=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002245=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005938=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006312=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006118=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007926=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007450=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001545=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001464=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004921=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002557=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000149=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000011=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006620=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006905=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000661=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000177=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000585=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008374=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003542=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002058=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007418=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008127=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003316=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008120=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002918=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001661=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005527=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005673=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008118=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007816=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001286=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006052=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002716=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003870=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005499=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001916=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004702=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000008018=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007706=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002401=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007103=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005559=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002518=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000891=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007562=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005031=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003756=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004047=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000081=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006265=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007009=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006111=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002422=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008363=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006947=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005733=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005635=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005974=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002311=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001336=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006895=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001133=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001738=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007581=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004107=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005054=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006898=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007755=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006525=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002313=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004276=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008184=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007136=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008213=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003037=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002514=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002913=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007591=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003272=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003470=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002969=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004320=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003215=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001120=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003817=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002005=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001251=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003868=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005766=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002613=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004542=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000663=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004789=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000980=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008165=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004646=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006071=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002525=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000639=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006131=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003254=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000084=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008421=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002567=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007684=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004293=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002895=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003295=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000960=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000883=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002392=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006309=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001598=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003917=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007923=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001994=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007022=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004983=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006925=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002863=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001881=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007515=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007372=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006429=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003627=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006471=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002305=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004639=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004979=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002699=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004959=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006227=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002387=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007681=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006551=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001768=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002155=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008250=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007117=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004006=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006830=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008049=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003088=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003052=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006343=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005123=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003847=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004290=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003164=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005120=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000942=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000260=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005325=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005229=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001424=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006017=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002292=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001364=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001232=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007679=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000189=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006514=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006254=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008167=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001760=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002371=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002729=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004251=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004311=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007999=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005139=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002676=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000030=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007387=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007469=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005426=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004819=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002210=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005296=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005713=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004855=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007852=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006002=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008248=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007471=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001270=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006498=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008006=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002017=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002980=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005411=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003474=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002648=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000067=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004343=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001892=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003121=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007219=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000003118=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000763=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005568=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002842=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008276=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003651=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007095=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000095=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002421=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006318=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007984=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002217=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001380=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001920=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002029=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004491=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002796=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005161=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002082=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002675=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004283=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007127=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001626=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000938=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003309=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006754=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006056=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000831=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006055=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002513=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002549=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008021=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007492=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000666=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000724=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005268=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003119=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007955=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001532=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007221=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007513=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005885=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006466=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000858=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005895=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004206=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006168=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007632=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003749=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002402=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005665=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002214=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002359=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004477=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002756=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000904=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006558=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000672=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003257=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005664=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006936=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006743=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005006=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005669=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006218=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008339=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005902=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001953=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007535=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006765=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001321=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007091=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000761=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007209=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001741=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004031=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002377=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004430=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000739=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003163=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007278=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000884=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002410=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002237=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001490=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008152=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003774=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007773=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006963=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001196=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007118=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004760=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000003624=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005176=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000792=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008327=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007701=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007433=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004756=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005670=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007414=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004828=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000296=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005589=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005855=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000033=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006530=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000796=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005203=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008024=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001472=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004161=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004022=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001560=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000530=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001631=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001238=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001078=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002025=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007336=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005884=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007420=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003250=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007176=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000973=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004952=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007385=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007576=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002253=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003954=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001822=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004640=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008077=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005497=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001338=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002008=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007711=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004816=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000003676=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001454=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006018=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003658=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000664=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004677=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006215=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006382=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005118=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002950=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004700=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001778=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007083=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007333=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008279=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003655=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001809=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007687=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004575=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008365=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000014=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001790=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008205=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb00000027=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004155=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000470=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007309=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003314=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002118=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000990=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007357=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003325=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001265=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003760=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007266=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001591=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004381=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004212=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001145=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006643=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007569=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004220=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006174=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005433=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004860=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003008=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000001071=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002184=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003495=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005616=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006664=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002616=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007346=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001056=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001740=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008228=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000007355=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000423=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003631=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004864=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006571=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001786=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005005=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000653=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004652=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004309=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001354=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003369=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004705=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005290=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003090=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007657=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002776=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003424=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007991=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006472=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000562=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005309=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005809=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004441=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001351=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003931=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001699=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005982=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008301=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003218=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006003=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006419=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006795=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007048=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007383=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005581=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002258=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000867=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005640=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006236=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004069=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004621=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003697=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002012=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003386=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004971=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006094=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002516=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000361=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002673=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007899=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000269=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006945=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001180=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004243=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007197=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005900=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002374=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002146=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001575=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004748=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002226=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000836=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004823=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005272=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004004=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003562=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002886=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007100=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001955=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004553=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005804=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005035=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005922=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000063=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001349=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003837=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003232=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000820=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002678=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006491=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000956=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001256=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006683=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004590=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003267=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004630=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004126=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007941=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004599=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003336=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002873=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005781=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003932=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000433=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008013=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008306=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb000000123=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004911=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005999=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004896=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001124=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003877=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003161=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002173=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005984=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006283=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005301=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000657=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005820=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002111=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005416=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002443=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001929=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000430=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002491=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004183=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002324=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007479=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005141=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005202=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004401=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004481=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008415=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003368=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005195=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001639=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007917=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005998=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003378=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001168=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002906=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007395=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000775=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003894=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005024=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001268=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006600=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001973=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002771=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007979=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006184=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000620=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002592=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000136=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005943=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008271=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001803=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005107=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004536=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005093=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005593=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004166=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001176=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002130=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003004=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008263=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008202=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003812=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007005=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003804=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005090=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007474=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006752=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007331=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001761=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001629=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008171=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000049=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001397=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001114=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006741=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003497=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002446=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003589=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003921=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002077=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005380=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006903=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002057=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002329=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001163=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001614=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006145=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001185=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003983=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001915=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007510=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006996=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006539=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000264=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001812=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004202=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007819=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003483=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004433=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001252=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005340=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008259=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000196=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004219=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001911=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004037=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003943=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005439=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005308=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001495=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004086=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006035=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005578=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000619=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005812=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006006=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006794=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004898=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008409=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008132=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002156=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004142=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004168=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002933=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007592=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003383=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002687=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004809=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006337=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006802=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004893=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000267=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007088=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000594=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004827=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004437=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007004=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001344=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000193=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006054=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006404=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000680=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003083=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007342=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007359=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003391=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006709=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007207=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000342=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000445=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002718=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003444=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005178=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002748=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005067=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005992=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007349=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001410=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000513=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003384=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000602=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003380=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004957=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005282=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001366=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002172=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007615=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006857=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000001208=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008103=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000126=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001432=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007683=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002419=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008247=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000007=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000496=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001487=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000378=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002538=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001883=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003027=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005108=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005148=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007666=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007217=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003574=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004313=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000652=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000070=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007018=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004578=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005878=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006078=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003599=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002497=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003138=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005865=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005257=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002353=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006917=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003662=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005784=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000725=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000229=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006860=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006092=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005995=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005298=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000786=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001903=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005359=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000670=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001697=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006258=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007242=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003423=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002128=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007478=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003051=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005389=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003080=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006324=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007467=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008217=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002033=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003274=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005313=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001672=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004080=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005352=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004124=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001668=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001164=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003719=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001153=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006561=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006143=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005919=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008086=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004991=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006618=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000224=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007281=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006280=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004509=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004218=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002114=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000090=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005044=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005337=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000099=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003850=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000909=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006728=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000315=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006461=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007725=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007707=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002400=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007162=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006773=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003296=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005291=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003100=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007307=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005073=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003086=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007777=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006518=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006935=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002161=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008355=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002000=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003811=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005177=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004749=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006397=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006814=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001996=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007421=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005036=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000523=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007708=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005815=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007311=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006166=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006321=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000054=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004392=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007886=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001030=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005059=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006699=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003381=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003197=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006734=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003830=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003245=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004424=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005076=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002811=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000718=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007016=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006021=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000226=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007294=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008121=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002765=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008303=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003445=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003177=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007439=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004559=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000993=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007046=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001891=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005604=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006986=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000472=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003896=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004153=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007817=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001118=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005737=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005208=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004619=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007972=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002785=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007519=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003287=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004922=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004270=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001889=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001914=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007621=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001856=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006303=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000539=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004154=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002486=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001950=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001488=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003277=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003220=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007099=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007533=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002563=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004600=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006812=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004297=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb000000140=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001345=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004588=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008429=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005099=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000817=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004695=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001187=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005502=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006141=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005620=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005174=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001745=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002505=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002517=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004734=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001815=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005525=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004507=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003022=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007895=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004579=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000258=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002150=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000421=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005909=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008414=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007417=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000351=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001937=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005639=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008238=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004192=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000200=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006682=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002215=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000749=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003026=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004874=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000677=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001615=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000675=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008336=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000007544=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006271=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000534=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005091=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005657=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005032=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007881=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002857=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001378=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005049=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002647=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005027=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003869=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002132=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002493=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005680=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007142=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006519=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006692=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005857=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004965=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004291=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001534=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001888=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006113=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003222=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001041=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002972=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001609=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007655=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002101=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001480=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005851=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005357=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001831=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001309=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000313=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000522=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001908=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006596=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002316=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005951=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006122=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007520=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005281=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005521=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006483=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001912=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008357=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004589=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005023=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003824=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002368=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005968=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001669=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001956=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005066=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006548=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004409=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004963=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005193=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002669=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006320=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008377=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002882=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000247=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007061=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004706=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003362=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002552=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002991=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001997=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003469=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006679=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008307=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000590=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003759=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004395=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000088=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005206=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003149=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007585=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003293=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006315=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002519=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003584=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001590=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003098=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000706=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005906=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001473=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008270=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000003459=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002183=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007454=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001517=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004036=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003053=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005823=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004307=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000715=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003320=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001389=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002423=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001051=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001520=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000335=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005286=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005188=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005379=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007718=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006902=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006598=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007122=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006695=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001605=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007089=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007483=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002492=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001507=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005993=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005200=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001999=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004949=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007756=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007553=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008283=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005806=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006647=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001780=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004587=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006129=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006521=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005345=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006297=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004664=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004784=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006127=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000683=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002638=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002953=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001403=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006604=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001584=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008008=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006889=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005736=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000154=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003240=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001224=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004012=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008154=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006409=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002016=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005186=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000554=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007344=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005264=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003565=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001559=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007153=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004210=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008354=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002388=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000004388=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003124=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007216=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007792=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001418=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007754=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004943=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002264=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002802=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001698=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003829=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008256=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001552=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003189=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007992=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000541=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007302=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004721=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000735=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004013=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003339=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003934=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007189=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002153=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002602=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003732=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003146=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005343=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005363=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007119=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003279=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003569=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003242=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001169=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002701=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005360=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003460=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000616=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005904=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000415=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003775=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001857=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007360=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006247=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005671=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008219=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001108=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006226=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006490=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007743=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002070=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003673=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003186=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002154=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000207=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005466=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000828=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000934=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007452=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005079=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001215=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003659=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005540=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007940=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006394=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001869=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005233=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000576=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004594=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000007434=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003072=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008243=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001543=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002096=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001074=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002826=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001492=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004329=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008058=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005406=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005472=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004196=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007125=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005516=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004871=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006974=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006082=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002993=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001328=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006442=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005840=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002429=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000211=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000231=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006440=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002831=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006439=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002447=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001844=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002395=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001081=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002286=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005782=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004032=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003102=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005816=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004273=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007407=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006844=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005826=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000515=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004152=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004605=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003162=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003443=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006336=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003796=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002806=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006938=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003139=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006249=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004980=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004549=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000518=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008237=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000007063=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001294=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002335=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005686=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006161=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000262=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003010=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005094=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005462=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007832=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000165=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001824=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005470=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007399=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000608=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003956=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006422=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005932=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007328=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006313=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007019=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003488=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004059=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006116=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008136=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001277=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007902=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001469=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007285=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007588=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003239=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000595=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003843=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000732=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007945=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002979=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000005755=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002158=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000037=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000589=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003192=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002994=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003944=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001340=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004581=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005772=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001802=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003271=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008169=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005891=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001849=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008066=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003441=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002835=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007887=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008153=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000745=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002349=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005211=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004883=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004651=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002559=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001696=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003953=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000139=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008316=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007807=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008436=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006463=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003243=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004768=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007120=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005677=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001987=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001687=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003559=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001617=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001878=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000331=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000814=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003351=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002099=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000591=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004353=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001236=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004049=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006430=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002310=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004450=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005603=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000299=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000461=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004763=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004258=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005769=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000162=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004868=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001084=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002694=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007670=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006112=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002833=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008340=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004105=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005577=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001701=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005797=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003520=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006524=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006505=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008009=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006034=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001443=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005790=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005980=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005053=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001960=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005916=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002764=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004517=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004576=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001569=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003264=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001705=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001126=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003923=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003543=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007589=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007774=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008423=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005365=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001499=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004573=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006614=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001644=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000533=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002140=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002325=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007600=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003276=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005876=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001783=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001278=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001407=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000926=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002350=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001873=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002502=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005715=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000937=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002065=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001688=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008438=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003628=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008043=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003728=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000437=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005590=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007188=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005215=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004773=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000238=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005925=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001361=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001166=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005849=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005542=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001312=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006346=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002731=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005077=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002179=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002937=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006839=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004318=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001146=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001732=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007678=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007978=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007130=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003671=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006288=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006022=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003467=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003184=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001161=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008106=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005276=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006164=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006649=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005306=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007514=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006863=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000405=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007596=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006988=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003308=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007047=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001242=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000017=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001022=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000372=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001759=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002438=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006874=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005392=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005299=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005805=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004399=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002526=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003458=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004448=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008027=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005175=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000916=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001159=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003374=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003141=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008382=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004467=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001209=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004301=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005414=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005508=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000009=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005137=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006330=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006163=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003358=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004686=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000962=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006537=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000370=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005621=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003266=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004112=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001017=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006284=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006563=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002952=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005627=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007030=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002469=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006077=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006479=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003514=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006933=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007885=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005747=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006767=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002243=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007627=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002384=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003712=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007409=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004050=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003857=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006609=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001726=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007853=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003073=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001931=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006508=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002307=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005042=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000005109=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007213=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006235=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000263=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005515=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004754=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004987=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003854=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002229=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007643=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004342=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006027=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003178=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006080=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006690=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004445=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003091=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001316=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002464=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005136=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005101=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002241=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001991=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000006757=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005789=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004550=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008337=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001554=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000678=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002625=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007998=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001086=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001082=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004435=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000336=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004148=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006467=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000716=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005886=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001921=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006305=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007169=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003283=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007977=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001308=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004323=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000001178=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001624=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001440=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003997=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003167=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007097=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001681=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005979=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005611=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008211=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003618=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005255=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002267=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002649=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000579=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002540=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007354=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004830=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001434=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005591=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003333=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006407=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005501=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003940=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000053=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007746=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006778=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000593=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001572=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008321=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007571=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006281=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000860=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003348=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001654=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004858=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007849=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002260=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007268=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001037=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004470=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008208=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006786=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006456=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007753=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004132=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006194=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006106=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002489=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000222=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003165=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003866=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001648=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005553=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004335=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005711=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006834=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005643=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004434=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006715=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004926=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004513=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006171=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007453=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005020=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000897=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004935=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000215=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003668=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005087=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007297=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001218=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007145=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005596=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007182=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008426=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007769=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005518=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001020=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001511=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002737=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007911=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006987=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000122=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002216=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005744=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000401=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005663=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000191=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002761=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004483=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006243=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004707=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000781=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007812=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005163=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005672=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005265=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000386=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003686=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007781=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003275=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000457=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006634=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001471=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002668=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003401=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006568=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007889=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005996=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002361=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002575=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000195=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005043=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006554=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005496=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002123=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004264=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001829=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000902=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001292=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002851=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008435=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000107=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004499=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001686=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006230=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001481=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007723=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006237=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006502=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004378=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006824=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000479=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008404=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005152=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004747=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007827=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000003315=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000497=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008081=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006114=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001536=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000456=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005750=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004925=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001302=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005218=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007630=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003217=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004114=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007422=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000550=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006214=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007059=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004484=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002934=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007974=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000704=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004317=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005801=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000004397=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007128=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003994=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000257=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007668=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007552=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005894=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003047=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005243=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001175=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001839=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001622=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006391=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008180=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006049=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006789=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004027=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004934=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003684=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007015=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002545=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002427=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003746=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002203=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000526=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000532=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005000=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007987=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001968=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003054=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001025=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003617=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003426=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004648=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007540=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003788=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005092=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003781=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004885=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008318=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004781=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007035=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004305=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002103=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007255=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007988=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000006934=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004845=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005822=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001429=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003455=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000868=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007423=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006455=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000176=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006626=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004846=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001827=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007836=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007851=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001793=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007554=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004236=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001301=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003134=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008264=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004474=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008298=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000001504=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001725=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003153=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005582=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002939=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008255=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000006771=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002544=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001958=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003055=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007872=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000866=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003259=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003848=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003128=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006599=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008272=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002844=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000895=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002097=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001503=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006220=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004312=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002634=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000008041=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000213=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007365=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006569=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008178=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007829=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001574=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002125=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004676=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003188=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003933=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003797=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007185=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003964=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006854=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001111=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004187=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007090=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004650=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007924=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006380=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003822=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002951=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007593=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003820=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002944=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001502=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002293=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006372=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001743=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000603=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007493=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006565=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005455=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002223=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005828=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003995=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002543=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002719=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000138=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006383=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002148=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001132=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002107=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006855=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007146=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001428=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000452=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006516=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000776=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007989=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006578=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004887=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002897=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008292=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002048=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004101=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005988=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003876=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006665=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006573=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006897=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003700=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007541=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004010=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000392=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006278=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002732=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001337=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000343=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008317=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005424=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000875=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005230=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005480=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001895=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003493=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003415=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001976=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007859=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008375=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005788=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003454=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001019=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003061=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001329=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003961=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007138=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003852=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001245=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000235=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004667=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002947=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007056=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003975=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000413=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000899=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007043=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003342=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004620=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006654=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004163=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003408=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005634=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008437=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006206=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004235=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005002=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004334=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001951=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007640=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002081=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005495=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008324=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb000000698=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000008290=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001938=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006216=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007638=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007317=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004671=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000645=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003050=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008220=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008406=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000209=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004531=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001219=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007826=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005870=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004502=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006294=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003311=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007575=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000564=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003652=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005465=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001063=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000690=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008387=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000604=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003534=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003511=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000509=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004275=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000797=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006977=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001818=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004410=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002735=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000218=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004135=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007249=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001423=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006177=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002801=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004221=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002816=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003704=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002629=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006843=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006576=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000584=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006738=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004674=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006688=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000214=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005868=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006962=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001478=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007411=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005370=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002159=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002304=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007874=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000544=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007428=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000685=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004067=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000345=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006387=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003330=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002611=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007732=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000007796=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003860=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004368=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001170=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006301=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002595=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006642=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001635=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002358=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003871=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007023=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003693=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000689=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002710=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004770=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005783=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004314=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004561=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000480=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004222=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006566=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001100=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006526=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb000000384=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001486=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007290=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006304=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001045=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002109=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006067=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000036=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002777=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004442=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002078=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001995=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004204=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007456=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003175=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007798=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000157=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002850=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005385=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002709=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008254=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005880=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006244=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003204=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008282=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000005190=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002631=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005098=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005764=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003692=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002348=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001773=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003041=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007180=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006544=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002610=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008090=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005140=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005209=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005794=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002527=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007425=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002949=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000486=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004566=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003198=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002317=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005654=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002818=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007489=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002386=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006068=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003976=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008115=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002306=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001751=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001371=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000375=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004761=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007595=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007084=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004406=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002106=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007154=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003902=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000369=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005474=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000022=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001451=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003601=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005261=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007566=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002594=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004872=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007652=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001608=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000324=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003045=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001544=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000928=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004159=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002640=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007787=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002787=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001388=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000297=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004947=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006499=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007206=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006444=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003476=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007106=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001491=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001059=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004444=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006328=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003718=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005913=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005052=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001310=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001939=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004390=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005156=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005263=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001821=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008125=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005765=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007507=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000352=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006089=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004568=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002011=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007419=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006485=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008422=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000008320=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000003722=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004172=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008194=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004683=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001257=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001289=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007919=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004170=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002657=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003028=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007358=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003084=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006597=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005453=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007669=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000464=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006823=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006992=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007603=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003238=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007546=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005132=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000003784=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007804=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004017=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003639=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002692=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002296=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007622=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004904=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001220=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000466=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002205=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004300=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005419=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004468=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006212=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007873=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003043=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004412=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000298=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002598=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006701=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000373=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002231=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001897=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003466=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000180=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006165=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003792=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007391=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005675=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002227=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000695=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008080=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003550=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002455=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008319=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003653=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000085=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004274=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007329=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002914=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001627=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001754=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005418=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003206=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004358=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000004321=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001509=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003064=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002978=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001711=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002062=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004563=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005447=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003754=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001837=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005818=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003145=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000435=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005198=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001467=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002819=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008335=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000951=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001957=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004057=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002406=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006671=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007570=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005038=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003874=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002754=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000042=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004419=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007193=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001662=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001130=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001924=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006402=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003392=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006314=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006253=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008072=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001788=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005956=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006418=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004832=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006198=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001724=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001003=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007008=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005937=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006868=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004672=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007959=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008138=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006924=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003131=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005703=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001222=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004372=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002698=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001359=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000287=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002484=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000659=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005069=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001564=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007060=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004008=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004072=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001239=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004708=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001850=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003967=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003185=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005488=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001550=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007239=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002636=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008418=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001182=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001838=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000146=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001121=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005081=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003191=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001214=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007634=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001683=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005491=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000495=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007560=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000233=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006815=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007894=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006904=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000957=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002988=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008054=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000519=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006389=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001729=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007524=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001946=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003003=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002931=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007244=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001749=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004547=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000994=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003290=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006316=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007376=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000927=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000733=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007098=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000481=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001227=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001709=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006545=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000007818=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006155=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001896=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006800=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003265=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005601=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003806=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004184=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002929=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004866=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004670=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001445=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004611=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005254=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003757=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000227=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007905=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000986=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006364=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002219=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002630=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006859=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000835=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000008314=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb000000872=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004423=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000925=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002807=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005629=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004716=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007545=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001925=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003773=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001413=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005985=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004139=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002345=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007071=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004968=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002605=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001665=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006588=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002845=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007332=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008157=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002722=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006729=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001528=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007662=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004330=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008166=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004534=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005630=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005964=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002922=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000989=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007335=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008141=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001266=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008119=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000106=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006345=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001983=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001448=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003152=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005395=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001902=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007172=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000387=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001482=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000356=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004745=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000881=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007401=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003056=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008091=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006880=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005832=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000843=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006044=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004325=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003766=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003031=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005662=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004494=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000348=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002504=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000050=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004033=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005009=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003716=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008367=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001863=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004691=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003376=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004660=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002838=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007203=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006416=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007369=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007983=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007191=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006816=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001375=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005413=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007563=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006489=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002763=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001537=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003623=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004175=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007584=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007270=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007159=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005683=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006542=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004328=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001039=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002094=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001985=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004585=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004471=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005899=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006043=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005957=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002941=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000744=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001346=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007238=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003596=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002351=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005905=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007462=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001755=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000454=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000468=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006534=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002498=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005890=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006172=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004853=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000805=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005652=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004729=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006850=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008315=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008428=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000358=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006928=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006523=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003818=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005752=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006954=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003137=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002326=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005451=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007736=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002194=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000276=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002035=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000970=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001158=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008104=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000407=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007157=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002390=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007768=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002275=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000549=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002642=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002056=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001763=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007780=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006075=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003602=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007397=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008093=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007860=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004145=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005316=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002021=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005734=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002976=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000332=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007107=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007958=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005457=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003300=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008376=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004631=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003142=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008004=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008373=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003014=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001907=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008346=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001303=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004515=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007366=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000597=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002643=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005312=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000755=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008075=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005471=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000771=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003345=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001816=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001324=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002393=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005875=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006782=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002684=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006493=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000609=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006401=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000818=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002439=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003070=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002691=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006133=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002457=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004398=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001335=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007737=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007548=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007381=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002265=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006837=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000194=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003802=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002212=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006672=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003578=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005584=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007038=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006109=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007623=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003816=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003343=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006667=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002713=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004299=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005796=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003849=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007932=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001500=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003764=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004635=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000491=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003661=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006755=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008446=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002726=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002385=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003425=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002247=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003985=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005022=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000583=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001461=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000873=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004961=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003544=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003711=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006408=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008388=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007218=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007283=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008187=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007550=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005353=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007809=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007196=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008143=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002169=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002285=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003992=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004764=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006121=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008070=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008258=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000004528=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004759=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007161=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007692=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007696=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001012=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006625=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003114=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005598=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002666=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000708=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003074=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002076=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003187=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005329=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002651=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007248=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003882=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004034=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001901=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000756=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000844=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002431=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007845=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004794=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002946=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008196=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002745=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005986=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004230=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005386=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004772=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000746=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005278=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002333=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004520=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007536=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004714=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005970=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005330=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002725=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002187=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002878=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006088=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003646=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000569=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002248=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007579=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003571=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006400=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002839=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005019=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006911=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000525=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000931=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006273=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007730=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000417=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007240=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005597=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007457=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001188=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003375=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007155=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001104=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005361=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005270=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007326=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003389=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007559=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000924=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004755=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007648=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001466=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005740=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002073=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003715=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005716=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002766=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002138=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004024=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004040=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007543=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002927=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005936=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000291=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006989=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006758=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006069=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002362=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006976=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006785=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000429=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003502=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007368=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005368=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005138=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002564=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008204=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007424=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008253=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000078=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004150=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006458=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003385=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000132=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006070=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003154=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000376=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000216=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000784=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005442=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000455=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007537=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006020=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006899=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006390=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001110=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001796=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002565=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007883=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007143=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004162=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003807=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005355=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004633=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003777=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001894=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007148=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004993=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007667=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006050=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003965=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005721=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002037=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005908=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002273=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002987=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004351=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000982=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001549=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000183=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008360=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004106=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001746=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003814=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006574=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000065=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005034=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006656=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005114=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006217=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004884=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005017=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001868=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000004689=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006327=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000202=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008114=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002860=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006543=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005724=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006503=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003471=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007771=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001800=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007695=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001870=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004967=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002515=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001006=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007345=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002299=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003012=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006674=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003205=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004253=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000083=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003675=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007384=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006005=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001817=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004440=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004365=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000478=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005981=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000419=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006183=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008100=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005776=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002328=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000075=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005872=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006710=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001293=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001083=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002322=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005708=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006150=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003657=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003828=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000008087=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005554=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006804=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003122=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008229=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb000000325=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006001=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000985=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002051=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004899=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002721=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003464=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001513=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003982=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005760=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006160=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007484=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001852=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003634=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000824=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004461=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007289=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001556=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000006277=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001129=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000624=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005990=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005539=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008370=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006827=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004028=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008440=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002088=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005134=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006477=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000436=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005180=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001730=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000141=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000971=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006766=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003588=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005045=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005172=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003726=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003023=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002034=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004694=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004263=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006142=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003302=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002866=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007783=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001005=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005071=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003087=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006628=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004613=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000918=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001826=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000629=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000853=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006176=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002019=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000175=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002727=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001162=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008232=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001706=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb000000261=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001258=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002432=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002655=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004577=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001046=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006209=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003978=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003450=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007686=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007367=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007020=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002977=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003808=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006310=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003720=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004850=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001679=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001547=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006883=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000750=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003069=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002055=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000469=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006621=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005422=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004907=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005460=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000720=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005791=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004366=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007503=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004938=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000959=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002254=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006344=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001384=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001722=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004523=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007300=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001274=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001954=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000736=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006592=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007727=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005226=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000008148=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003952=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007506=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002170=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006130=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006673=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001333=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006369=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007108=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007408=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007364=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004836=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002178=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003590=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001577=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008394=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000094=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001519=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007276=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002290=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004215=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008345=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005549=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006317=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008181=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003600=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005037=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005469=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003913=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002578=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001981=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000103=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000601=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004595=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004622=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005745=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003364=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000774=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006893=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002685=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003752=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003350=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006225=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003886=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006907=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002100=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007957=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000568=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000427=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007969=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000155=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004511=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003457=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005555=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002032=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008207=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002935=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002009=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003707=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002600=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002872=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007674=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006746=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002837=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005595=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005618=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005321=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000963=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001415=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005194=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002749=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005062=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004797=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006510=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006084=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001062=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004723=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004350=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007673=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001493=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001526=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007633=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000293=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002038=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006589=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001785=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007273=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001057=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003340=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005705=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001094=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000008107=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006616=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001334=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000821=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001525=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001212=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005477=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005802=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000300=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008294=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000003337=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006653=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005972=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006210=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001151=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004248=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007815=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006392=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006595=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000230=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002788=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008139=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006507=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003209=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003528=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001658=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007878=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002537=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005293=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001723=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002680=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005364=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003510=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003958=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008073=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001444=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004421=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001050=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005454=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004627=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000974=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004055=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007608=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003645=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007064=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000198=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004615=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004682=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007233=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001879=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007772=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007316=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002672=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002999=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007744=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005567=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003361=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007903=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002500=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001693=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000627=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000722=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006923=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004016=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004478=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008020=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006004=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003116=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003397=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000007446=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001694=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002623=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005702=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007496=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007481=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000161=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000524=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004656=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005489=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002955=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004231=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005326=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000434=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008341=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007854=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001595=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003611=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001757=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005824=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007204=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008433=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006432=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002129=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005224=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008137=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007770=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003020=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008221=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000002234=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000723=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004647=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006023=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000651=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000089=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000770=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000856=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001906=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008313=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000006377=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005250=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003346=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006689=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002342=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003332=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006983=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000007468=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002783=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006747=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006687=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004917=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007096=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003373=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006536=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007003=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000793=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008158=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000634=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006057=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002790=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004324=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004404=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006961=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002604=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005546=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005056=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004014=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002364=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005449=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000005484=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005725=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000013=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006307=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004955=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000172=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000611=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002426=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001342=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007388=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001406=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004586=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002507=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007968=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005434=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001408=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001127=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006482=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005004=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007014=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005335=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007304=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008124=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000234=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000687=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007642=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005709=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007152=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006832=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001463=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004663=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002147=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004030=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007980=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000096=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001165=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006694=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001259=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002619=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004265=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001112=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005503=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004487=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000688=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008242=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000961=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004447=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001475=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002583=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000340=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004143=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007775=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003244=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004418=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006796=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000734=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008252=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003800=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007965=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005888=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007844=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005693=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001422=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003988=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002828=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004736=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007435=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006615=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002049=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003518=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000965=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000737=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002762=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001535=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005213=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000997=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006030=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004583=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007286=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005332=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003727=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003202=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007178=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005011=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006255=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007908=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003170=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005698=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001739=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001682=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001583=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002309=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005914=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003769=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003938=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006590=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005651=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003519=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005300=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001555=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007884=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006716=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004466=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004698=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002355=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005167=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004684=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002588=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003223=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000630=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004519=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007444=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003484=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006622=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006577=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006007=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007888=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004420=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005730=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003486=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006263=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004169=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003390=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008257=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000006179=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007741=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000933=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000598=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004727=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007151=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003790=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000116=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007539=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005150=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006025=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003758=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007074=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005636=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000003723=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004786=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004407=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006045=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007447=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003046=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005550=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000923=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003592=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007410=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005358=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006619=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007530=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004892=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006333=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002693=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000665=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003260=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001314=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005845=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000038=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005244=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000717=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000008344=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007898=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001260=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007378=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002175=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006921=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001404=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004110=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006881=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000753=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006059=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006384=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004414=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000859=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002705=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005483=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008249=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001930=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008215=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001381=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007802=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007291=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007935=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb00000097=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007985=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007011=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003016=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007031=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003842=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006412=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005785=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000587=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003815=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007682=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008430=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005975=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007750=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002334=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006125=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000864=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006761=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006420=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003323=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005456=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004765=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007001=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006978=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001275=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001969=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006298=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003030=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006170=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002744=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001405=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006541=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000197=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004207=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001685=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005545=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002974=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001765=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006388=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003013=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008042=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008305=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006448=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001529=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006015=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005910=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002530=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000443=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002354=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007073=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005821=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002591=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007079=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006873=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002341=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004173=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003195=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003616=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006650=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005510=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002261=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000411=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008040=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004787=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006196=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005450=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008413=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004156=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004352=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000898=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001093=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004678=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008393=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008212=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005369=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008448=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004213=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006481=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001947=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002614=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006063=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007901=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007305=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007337=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004945=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006014=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004266=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000111=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006375=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007672=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008331=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002925=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002312=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007799=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007629=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006205=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004464=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005771=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006275=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000823=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005187=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004063=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001136=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005966=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008059=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008074=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002795=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003670=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002876=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001285=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002753=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006852=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000643=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000658=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007675=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002601=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002734=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004347=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000221=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006675=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002861=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005543=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008349=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001138=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008386=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000901=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005528=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003225=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007000=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000192=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006434=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001932=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007913=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004598=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002814=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008193=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001069=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000282=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006606=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002945=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008329=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000002470=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007109=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001184=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003449=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007163=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003905=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001714=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008198=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006465=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004800=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006985=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003906=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000701=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003787=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005869=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003097=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004776=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005850=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006657=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007907=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007831=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000833=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007738=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004544=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003570=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003256=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004354=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003395=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002533=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005212=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003147=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007800=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002474=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005679=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003672=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005874=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000471=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007745=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000575=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000488=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005269=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008432=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006037=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005871=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005463=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000510=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001117=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001713=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005018=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001092=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002778=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006531=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003730=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005222=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005628=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004882=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000414=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004750=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001628=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001674=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007659=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001355=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003772=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006279=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006851=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004465=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002770=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001979=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005676=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005583=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001804=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003879=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005945=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007033=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002681=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007639=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003834=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005548=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005421=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000987=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005013=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003603=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008447=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004665=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005682=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000416=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003635=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003916=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002174=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007287=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006504=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003910=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002398=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000398=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004990=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007072=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002813=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007258=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008140=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003924=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003214=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000025=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007114=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003135=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003823=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007308=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007253=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007208=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008117=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007504=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004950=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001246=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001703=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008310=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006437=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003770=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007982=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003763=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006540=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005717=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007105=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003439=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001011=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003713=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003538=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002249=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006944=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007039=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001458=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005837=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004711=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002404=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005570=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004814=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004076=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007649=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007856=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004644=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007298=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001173=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004180=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004889=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000127=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003491=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006117=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006705=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004182=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000487=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004681=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007254=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007946=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004817=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001061=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005929=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002320=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005538=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004610=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004359=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000007396=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001284=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002263=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004113=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000827=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008014=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002921=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004261=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003001=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001887=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005438=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001781=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007967=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001263=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003630=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008151=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007234=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004431=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001877=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007081=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003112=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000010=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007210=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb000000399=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004740=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008064=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000772=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000232=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005061=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007665=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002919=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005563=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007654=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005697=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005197=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001933=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001325=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007605=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006296=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001735=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003664=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003632=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007223=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003398=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001795=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003666=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000005075=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000936=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008179=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000069=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005048=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004223=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003099=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004811=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004658=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006424=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000357=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003898=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003228=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001700=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007037=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000400=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004051=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002626=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004134=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002573=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002703=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007212=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000274=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000003357=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001684=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002715=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006594=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002697=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005105=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002444=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007133=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002522=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007231=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003915=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007398=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000314=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006008=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003321=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003006=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003101=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006876=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002319=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004000=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006515=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000952=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007301=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000003104=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007082=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001652=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006613=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006085=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004848=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008170=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005494=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005333=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002186=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000713=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003665=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000545=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004348=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005328=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005072=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003360=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006062=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008284=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000316=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005511=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000654=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002372=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000006872=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000426=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001978=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004506=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007619=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007567=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002092=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001736=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000903=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001882=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000728=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004986=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000326=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000145=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006322=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008163=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007214=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006896=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005241=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004362=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000273=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000794=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007996=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007587=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004066=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001899=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007532=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006083=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003103=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007705=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003900=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000582=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004606=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000463=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004571=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008131=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007735=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002774=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007664=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005267=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006501=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006717=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002298=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006602=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005535=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003468=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003537=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006181=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004046=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004675=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004612=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007943=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001909=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001580=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003517=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004923=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006918=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007658=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004783=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003338=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004129=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001313=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006693=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000816=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006847=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006926=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003604=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006370=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004766=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005901=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000700=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001680=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002940=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000870=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002151=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005653=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006348=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002010=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002688=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001319=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004247=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000667=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002728=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002363=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003366=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004704=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004257=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003801=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003521=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005655=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007497=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001962=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003974=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001172=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008379=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003247=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000540=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000303=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001734=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004345=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000907=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005262=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001210=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005204=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004042=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007227=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000493=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000439=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007036=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000801=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002581=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001311=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000185=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006231=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004626=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005191=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003058=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005235=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007272=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003082=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002142=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003748=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000428=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001462=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004510=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006339=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004389=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001752=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006882=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003304=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000815=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003430=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003873=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002028=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000068=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004936=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005656=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002482=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001610=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003113=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002412=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005625=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007938=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006480=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000560=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006040=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003986=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003132=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001010=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006447=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004371=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005128=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004912=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004608=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005192=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000219=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006441=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003284=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001201=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000178=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001186=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000008123=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005153=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004462=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008206=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001255=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004043=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000135=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000747=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007392=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005248=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005302=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006342=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001091=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001900=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004870=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004865=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004245=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002093=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006560=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004058=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000476=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006660=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000059=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000004224=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005586=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005318=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003950=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003509=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005366=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007597=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005799=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000091=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004902=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002956=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004862=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007431=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003151=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001653=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007175=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007350=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006290=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003079=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002741=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000930=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006242=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002476=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005015=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003319=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002014=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000729=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005383=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004632=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006801=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005759=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007296=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001867=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002269=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004981=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000632=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000503=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001753=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003959=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001347=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001205=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004068=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002938=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000241=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005039=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005232=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001287=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000066=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002617=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006353=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004302=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001719=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005911=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007900=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001149=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000697=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005767=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005866=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002546=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003075=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002968=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000954=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006570=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001228=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007651=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000890=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007246=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000253=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005234=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb000000259=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004948=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006953=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002773=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002740=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002162=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003695=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002730=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007516=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001663=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005051=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004227=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001430=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004428=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005113=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000641=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003941=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001479=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006136=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000555=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005429=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005599=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003107=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001076=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005877=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000531=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001565=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000862=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000596=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008383=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002963=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003554=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001152=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003907=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005121=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008036=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002137=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007141=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007806=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003736=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000374=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002477=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008356=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003120=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005390=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002095=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004475=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001140=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004924=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007165=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007199=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006958=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007848=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006644=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006224=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001243=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007502=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002091=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003610=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008223=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006338=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002213=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000159=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004416=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004339=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006513=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001362=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000113=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003353=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004053=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003226=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003663=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006291=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000032=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002405=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008359=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001728=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004479=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000012=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008046=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002558=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005428=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004095=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001135=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008155=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000057=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007526=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001842=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000633=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004009=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007135=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005189=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000003419=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000425=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001095=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005112=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005492=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005688=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002144=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007181=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001192=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004186=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002868=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002930=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003629=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006742=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004785=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004703=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004731=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004762=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001704=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002251=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003436=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003928=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000323=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004473=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002352=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001179=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003533=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001450=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000279=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003762=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007534=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008291=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007327=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005135=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002909=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005762=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005292=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001374=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004103=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000988=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002232=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005638=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008182=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003733=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003971=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000547=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb000000563=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005912=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007243=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001128=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003577=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004799=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005763=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000922=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003859=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004249=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000285=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007310=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001917=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004769=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004560=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006399=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008261=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002747=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004357=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000511=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008111=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007704=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006655=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007318=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005341=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002052=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004281=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005412=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005580=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003622=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003613=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007137=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005649=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002942=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000757=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005095=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007156=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004164=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004144=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004007=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000244=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002102=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000317=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006079=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000521=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003885=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001945=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000992=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003845=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001000=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005607=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001777=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007602=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000865=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007237=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003741=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000983=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000712=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002004=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002160=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001963=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000087=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003608=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002632=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003461=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004503=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006916=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002752=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004306=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002759=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000160=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002674=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004131=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005274=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000120=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007012=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004825=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008026=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008343=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007949=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001183=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005441=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005626=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001813=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005149=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000617=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006737=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006246=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003883=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004720=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001905=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004408=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006981=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001766=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007404=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004070=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003076=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000807=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004668=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000286=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005860=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002475=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000349=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002408=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007922=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007271=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001910=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006272=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004637=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008144=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004125=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004041=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008371=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005245=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002417=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000007614=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003234=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006190=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005478=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005517=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007062=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006640=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001501=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001952=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002201=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002865=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008164=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001872=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003813=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006732=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002430=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004233=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004185=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005955=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006611=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005585=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004084=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004875=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000112=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005400=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005742=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007068=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000932=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004178=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004268=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007232=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001032=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004500=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002867=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004355=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003310=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000773=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001386=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005939=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001926=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004741=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005617=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007549=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003437=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003355=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005157=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004717=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001588=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006943=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008350=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002039=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005103=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002449=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003594=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004081=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008162=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002990=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001203=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002702=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001202=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004377=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005566=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001523=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007010=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005536=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006866=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006186=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002570=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003285=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007149=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006095=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004840=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001417=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003563=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002376=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004472=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006584=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003036=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004272=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008235=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003556=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007112=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001779=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001557=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000320=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001510=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008135=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001446=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005830=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005862=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006636=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000143=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000004880=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002904=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007499=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008011=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007312=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006617=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002803=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000465=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008047=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008311=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004538=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004436=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006276=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003413=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005594=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004687=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002580=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003778=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002357=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001247=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006984=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008012=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007220=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000004501=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005097=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007653=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008065=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004989=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003498=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001024=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002067=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008200=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008019=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003694=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007279=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008016=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005839=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001065=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001650=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000968=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000168=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005700=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005283=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000764=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004100=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007250=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000003929=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007962=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007677=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007685=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008052=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002576=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002622=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008397=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004140=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000905=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002274=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001427=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003585=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007179=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005931=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007094=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002511=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000631=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002877=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001567=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001306=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000966=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003062=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001613=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001918=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003895=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004713=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000242=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004861=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004844=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004405=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000058=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006366=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008299=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006066=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002480=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008442=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003761=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004296=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005529=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007960=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005448=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005561=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002975=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002915=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003115=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002888=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002760=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007823=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004541=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003615=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000239=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003269=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004495=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008116=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002503=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004617=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006580=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000834=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007013=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001035=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003505=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004451=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006806=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000929=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005746=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000810=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002452=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002711=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb000000707=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004146=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005803=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008266=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000008195=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001531=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003560=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006927=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002965=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005778=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000006784=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008443=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004189=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007200=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004778=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004060=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006877=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005028=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000482=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005320=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004607=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000124=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000003872=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005853=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001561=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008101=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003200=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005924=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006638=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001720=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004288=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005512=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003579=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002499=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000431=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007970=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001365=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006410=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004597=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001225=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002690=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007334=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001810=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008296=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000758=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007393=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002501=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001234=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002163=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000822=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007275=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002562=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000246=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006713=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001820=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006959=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002188=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005367=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007712=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007590=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001941=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000367=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000453=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005287=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005976=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000829=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007950=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003039=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006680=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005600=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000552=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001047=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007542=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000727=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000485=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006363=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005780=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002520=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003305=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008089=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005967=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002407=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007373=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007555=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005798=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006593=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007438=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006702=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003251=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001558=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004021=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000005348=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002677=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002440=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001851=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003831=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003981=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000567=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002024=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000668=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006093=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002822=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007841=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006878=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006334=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005464=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006159=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007680=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000977=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001457=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004821=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002434=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005928=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005608=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002331=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007716=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002821=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003546=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002555=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000187=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003136=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006549=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000699=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005843=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004946=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007348=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007838=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000565=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000056=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004690=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000863=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004384=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003606=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006809=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007465=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002483=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006207=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006718=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007910=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006509=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005741=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001249=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000277=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001847=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007351=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001115=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001748=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006126=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003496=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006791=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003735=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003500=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008098=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005896=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006527=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005164=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006427=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006061=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005569=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002365=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002824=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003587=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002199=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001721=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008050=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008149=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006979=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002378=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005969=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003516=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006932=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006835=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008236=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000004616=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007739=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006901=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006032=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004454=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005046=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004625=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004179=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004380=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002984=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007865=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006156=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000252=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003482=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007226=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001018=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004019=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004011=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004738=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000074=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004869=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005723=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000642=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006060=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003286=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005185=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002855=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001125=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008322=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001772=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007961=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000354=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000473=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000760=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007644=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006975=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001425=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002085=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008173=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006289=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002391=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007837=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007416=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001231=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002775=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000813=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001468=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008234=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000029=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006211=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007876=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007455=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005125=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005531=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004933=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002143=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002561=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003887=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002706=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002599=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008222=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006496=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003393=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002825=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007476=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005377=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004818=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006199=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004310=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006260=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003432=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003248=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005787=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002167=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007951=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004552=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001043=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007049=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006449=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007321=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001014=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002887=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004018=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008199=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003059=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000759=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000158=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003453=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006826=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007171=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002141=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004188=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003683=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006185=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004136=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006631=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000156=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004020=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000514=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002958=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005674=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007742=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005256=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005770=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007437=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006393=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001692=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002884=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002624=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003888=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004735=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000102=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003169=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005425=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008039=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006261=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005642=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003897=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007766=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006910=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005959=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005728=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002225=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001571=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003503=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000008191=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002379=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006124=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007078=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000179=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001576=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000830=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004910=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007847=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005088=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000958=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004522=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003203=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006450=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003130=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001570=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004546=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007953=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003605=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005897=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008133=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003212=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008071=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000294=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007477=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002534=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003176=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008262=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004197=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001828=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003236=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005207=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001167=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003262=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004903=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005104=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000118=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008244=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003382=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002445=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007102=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003354=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002971=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008145=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001156=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006998=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004489=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002332=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000640=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005953=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007356=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002920=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006968=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008372=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005220=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002607=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007760=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001651=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008203=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006700=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005201=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006512=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007482=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005279=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002660=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005057=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000542=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004570=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005030=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001332=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006556=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008260=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008326=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007415=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001079=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006331=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005624=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006696=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005927=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000031=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007824=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003640=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003680=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000329=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000920=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000766=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002428=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002800=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006376=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002782=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002717=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001586=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000003396=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002479=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003678=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003880=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006058=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008174=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008420=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006783=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006264=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001690=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002924=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006763=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004795=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007076=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003019=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000600=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005668=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001506=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005437=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008095=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007449=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007613=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006256=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000462=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005685=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001970=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001670=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007867=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006706=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002198=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001027=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007292=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000710=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003947=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004015=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000876=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007808=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007448=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001972=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007656=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004628=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007734=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003998=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005050=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002912=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002671=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003140=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007612=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006511=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003669=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004655=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001426=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006488=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000607=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003181=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004962=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002646=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005485=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005707=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005182=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002665=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001459=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005544=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000302=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003612=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001949=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004826=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008060=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008295=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000131=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006862=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002425=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001764=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004654=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004629=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004065=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004287=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005775=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001402=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001474=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002542=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007277=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007762=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005889=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006302=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002157=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002585=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004988=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003508=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004234=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006731=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003863=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001097=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008188=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007052=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000440=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002373=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003431=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004038=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000537=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006957=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004697=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004881=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006562=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006241=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004942=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001331=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005691=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002582=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000309=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000061=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001618=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003598=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002961=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007406=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb000000851=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003739=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006867=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002145=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004526=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007693=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000404=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001695=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004564=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004974=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001343=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006097=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003724=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005987=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002712=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007561=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006922=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006780=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004992=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001619=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006368=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003740=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005181=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000002084=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005825=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001589=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003210=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005314=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005116=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005756=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006676=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004719=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005420=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001928=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005761=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002936=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002262=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001370=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003094=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001637=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004385=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003007=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003365=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001497=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005867=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005933=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002928=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002424=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003108=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004422=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003625=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005423=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008275=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003705=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001085=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002211=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005645=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008197=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007846=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001789=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005033=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007341=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005228=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002970=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002905=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002606=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005275=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008281=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007814=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000614=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005989=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006931=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002654=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000040=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000693=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005117=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001034=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001677=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003427=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006745=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007861=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001390=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006662=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005047=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005941=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002110=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002805=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003581=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007635=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000494=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008405=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000741=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000000=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005339=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002176=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001562=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001524=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002615=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007403=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000082=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005115=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002768=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003597=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001982=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006262=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007347=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005633=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006506=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003925=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001616=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004460=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004543=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003109=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000681=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002571=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb000000308=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000656=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006627=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007748=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004742=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004539=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001515=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004791=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004802=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007606=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007645=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001276=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007764=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004847=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004972=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000024=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000767=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007890=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005240=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008092=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006768=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004097=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005086=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003641=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003677=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001456=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006492=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006464=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006144=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003480=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003095=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005021=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002369=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007698=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004831=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003477=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005147=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006719=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004725=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007805=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001233=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001710=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000892=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003024=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002327=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006299=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005307=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002069=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007525=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006213=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001296=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004094=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006195=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004200=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005317=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000151=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006748=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005551=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003253=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003263=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004438=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005991=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002948=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000477=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004609=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000125=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006663=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003363=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000208=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb000000779=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001602=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008185=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002871=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006553=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005658=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008425=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006308=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000536=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000389=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001315=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006774=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006013=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007601=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006266=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002932=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005417=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007021=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006398=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000134=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005506=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007976=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003783=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002294=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006711=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003335=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006572=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006128=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003633=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005694=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000726=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007405=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000076=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000450=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003489=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005819=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007722=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003875=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002116=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003892=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003526=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000655=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007486=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003216=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002204=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006950=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000004838=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003674=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001737=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006238=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001230=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004052=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006411=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008366=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002832=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005143=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002105=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001171=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006012=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004319=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004121=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001484=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001974=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003647=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006776=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002089=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001320=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000622=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005513=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb000000418=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004548=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005427=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007473=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000981=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008210=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006074=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003499=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006698=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005612=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002321=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007577=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005690=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001295=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003324=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007158=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002539=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005960=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007933=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000517=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000906=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001119=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003969=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005954=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002659=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001015=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004005=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005084=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006937=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005514=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003193=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001993=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007134=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003572=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002370=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007236=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004476=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003000=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002858=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000919=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006154=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004439=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003530=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006818=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002284=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002593=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb000000762=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006192=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002789=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007487=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003018=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008399=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000055=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005124=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006379=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006685=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004217=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006090=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006528=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000020=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004229=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002524=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001060=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006797=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004842=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003832=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007111=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007835=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008396=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005827=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000789=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007915=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002020=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000556=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002244=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004854=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001399=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000538=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006138=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000795=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004804=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006585=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007828=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005166=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005557=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005391=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007558=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001936=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006300=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006533=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007975=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003780=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000004701=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003935=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004295=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005010=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007259=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007245=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002804=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000984=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007055=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000381=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007485=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006691=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004104=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000052=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008351=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004149=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002459=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000412=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006641=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004728=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003481=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001229=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007931=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001782=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000402=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001859=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001625=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005552=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005829=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007610=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000955=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008391=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000114=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000674=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007167=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003428=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006351=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004824=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004744=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003328=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000408=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000098=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006475=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002662=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006955=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005519=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003795=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000064=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001801=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002548=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003838=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005526=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005378=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004623=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001612=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004877=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007676=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006361=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002964=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000501=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005110=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003421=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000551=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005371=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007759=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002661=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001068=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001971=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004214=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006259=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002742=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000318=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000359=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007813=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002448=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004356=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008005=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007050=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006807=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007906=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001226=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003714=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001323=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007990=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006413=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000152=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006798=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006610=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006846=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002182=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005403=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000210=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004752=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008003=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005560=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001241=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002403=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008159=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006803=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003551=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007963=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004863=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000047=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005216=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001217=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001235=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006386=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008002=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003901=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006704=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006189=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005487=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001600=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007475=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001919=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006357=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002288=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005661=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006451=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002900=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002532=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001643=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002104=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000978=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003963=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007466=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003081=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008168=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003835=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008057=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008450=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002495=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002751=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007840=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007616=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007092=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001498=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008300=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007230=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001087=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000346=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008017=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006031=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001563=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008128=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000662=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005623=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004191=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006104=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005534=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001959=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005949=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005323=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001452=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007426=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003576=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000777=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007241=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004521=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001269=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003856=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000008410=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004039=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004331=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001808=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007694=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007952=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000028=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006406=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008035=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007495=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007954=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006152=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006708=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004374=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006120=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004803=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007758=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002637=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000507=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000914=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005500=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002720=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007144=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000003402=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002040=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003644=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003909=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002435=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003638=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002992=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003927=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004878=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001372=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000610=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007752=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003793=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004914=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004841=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006096=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000410=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004709=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001073=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004387=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002907=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005754=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008400=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004873=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005915=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005408=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002083=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002911=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006633=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001512=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000388=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004492=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001281=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003862=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003490=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002344=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004228=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007006=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000167=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004382=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003558=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000740=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007791=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007929=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007058=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001718=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002889=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004954=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002165=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002338=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000999=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001395=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001470=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006269=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006870=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002663=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002528=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001358=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000953=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002323=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002367=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004364=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003294=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004956=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007211=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006148=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006494=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004091=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000848=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb000000996=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007636=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005852=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005732=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008241=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000008130=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008007=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008216=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008150=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007299=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007045=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004915=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000857=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003035=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006073=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003987=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004099=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005861=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004970=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005779=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001836=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001858=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008224=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004284=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004835=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006469=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006202=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004198=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007830=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005650=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000268=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005440=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006374=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000626=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000119=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002042=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005162=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005800=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007269=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000798=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002071=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001064=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003776=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001638=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000169=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000910=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007198=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006495=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003942=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008342=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006973=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001540=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004771=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004843=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003017=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004432=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000505=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006909=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007691=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005613=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006103=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005183=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007293=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001352=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006349=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002224=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001864=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001419=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000086=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006887=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005159=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003710=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005393=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005304=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001596=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002772=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006470=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007132=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007353=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006942=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005170=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008251=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000006245=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004208=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000855=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003179=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003706=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002297=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002013=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004815=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007085=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007719=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000007123=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001223=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000877=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007040=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000696=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005284=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006340=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006208=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004634=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007432=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003690=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003946=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000275=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000170=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000837=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002736=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005726=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005847=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006871=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008395=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003583=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003370=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008218=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007007=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003065=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000182=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005978=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002420=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000572=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005356=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000743=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002875=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004604=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003025=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001948=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002397=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000991=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006703=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005490=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004205=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000290=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007607=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004737=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002769=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006555=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006270=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006842=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002874=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004876=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001348=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004256=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007626=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007017=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000508=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008441=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003221=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005342=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005349=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001207=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004743=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004480=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002779=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007833=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005259=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005271=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004062=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004259=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005793=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006140=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000709=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008134=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005684=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004715=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000527=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005252=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007697=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007928=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007914=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004260=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006831=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004806=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001431=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004901=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000800=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003096=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000702=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004849=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006167=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004226=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005579=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005965=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004341=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001756=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001630=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005952=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006053=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002468=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001611=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004657=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003529=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001211=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003540=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003442=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003303=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003989=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005722=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006994=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002810=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007441=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005958=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003168=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006473=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001862=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000570=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005064=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006064=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003213=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000446=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003523=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006999=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002739=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006535=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003229=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004994=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002568=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003404=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002108=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006460=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002827=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007093=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001861=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006435=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006087=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003826=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008000=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004833=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000502=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005748=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005444=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005106=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007205=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003893=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000283=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004252=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006740=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006579=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006849=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002450=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002315=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002743=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004326=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001379=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005078=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000137=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005547=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000422=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003993=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003451=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004133=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006735=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000849=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000004746=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007511=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004504=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005443=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003939=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000574=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001542=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006681=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005468=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002566=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007362=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000943=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002061=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005507=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000368=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007041=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001297=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001769=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002985=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001633=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001965=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000204=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002394=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001799=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005184=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005351=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004116=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006169=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006861=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006036=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002409=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004894=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004199=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008280=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006098=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001026=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005719=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003249=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004798=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003233=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007625=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004525=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006203=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003021=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007794=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005372=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005415=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000255=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002883=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006162=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003709=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001890=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006365=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002916=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002277=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003698=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007594=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000304=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006076=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000105=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001846=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002461=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006770=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004087=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001671=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001533=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001830=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006913=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000979=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006546=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001028=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006421=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008109=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005564=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001271=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007394=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001657=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004238=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007464=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005294=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005749=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007801=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003068=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004349=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008067=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001439=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007882=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006478=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004834=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007509=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006153=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003532=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000003258=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007101=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001767=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004400=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003049=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005892=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007025=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001449=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006487=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002257=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003561=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004867=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001055=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007430=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004524=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005251=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003015=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007201=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008112=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002535=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008230=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008408=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003230=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005273=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003406=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000535=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000077=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003996=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004109=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003105=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006137=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006971=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007086=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003667=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004496=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006367=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006885=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004530=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000803=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001016=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000880=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000108=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001300=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002399=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004403=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005237=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006726=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007429=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002723=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003767=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004044=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003980=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000310=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004456=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000738=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000228=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003060=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000691=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002488=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000703=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005171=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001357=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004141=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008108=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002121=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001922=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001935=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006582=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004895=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001460=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007400=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007997=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007139=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001984=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003288=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007320=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002553=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004808=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002134=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002136=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003738=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004079=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004733=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002512=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004269=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004151=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003089=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000998=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003246=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008079=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006350=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002270=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005795=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007026=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001412=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005396=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003945=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007628=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005144=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005881=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000944=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007195=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001904=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008273=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005720=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006575=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003582=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005575=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000223=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002791=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002843=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007786=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003241=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002131=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003737=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001727=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008056=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000451=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004029=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002490=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001262=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006646=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005029=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002050=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006362=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006268=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003359=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008190=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002041=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004569=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003643=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008023=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003034=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005859=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000280=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002465=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002891=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001573=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001691=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000271=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001007=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002127=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007121=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007247=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006632=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001855=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003702=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006605=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001199=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001875=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007936=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007225=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000272=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005571=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006939=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007183=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001174=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000730=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003129=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002266=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002230=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000007793=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007057=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003785=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005610=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006908=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007934=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005753=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005285=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001623=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002496=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002185=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000420=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008304=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002853=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003172=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001716=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004171=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007825=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005948=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005040=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003607=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007930=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000995=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005083=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004237=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004699=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005882=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000046=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002792=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007260=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005082=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006972=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002218=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007325=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004367=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002881=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002997=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004978=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000623=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007116=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005473=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002340=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005432=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003331=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005482=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002890=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000006010=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006378=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003356=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005648=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004045=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005305=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001008=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007896=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006658=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007262=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000212=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000778=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006993=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005962=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000791=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006697=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004535=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000148=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006347=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002608=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000360=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002704=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000397=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007850=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002015=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003621=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003920=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001530=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003968=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004582=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007528=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008082=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003930=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003891=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007538=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000109=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001943=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001393=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005068=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003201=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001398=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005903=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001593=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006845=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000939=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001678=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001435=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001396=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002908=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003299=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002279=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003557=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005089=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002820=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000878=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000490=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002973=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003126=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002652=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004653=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000559=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000377=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000265=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007720=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001603=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005918=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001655=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004211=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006991=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000646=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004332=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005122=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004960=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003306=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004455=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007956=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007880=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005008=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006462=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006739=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008334=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000043=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008110=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005344=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005606=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008407=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000278=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002413=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004195=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008147=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007284=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005835=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004075=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002579=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007480=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006436=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003063=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006341=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007390=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003649=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005842=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006995=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000250=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004277=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003042=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007339=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007174=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004117=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005704=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005111=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007778=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006997=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003485=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005445=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002510=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006019=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003867=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000034=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007747=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000650=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000811=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006354=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006134=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006793=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001944=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005266=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000366=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005025=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003771=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004562=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001339=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003092=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005522=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000648=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000783=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004996=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001002=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007494=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000334=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000236=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004064=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003721=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004767=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008126=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003465=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005977=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006115=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003747=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002054=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008401=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001066=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002786=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006132=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001131=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006956=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000045=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008078=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001807=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000819=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002456=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005659=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001122=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005280=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000854=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003840=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006919=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001640=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002856=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000458=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008385=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002191=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006474=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005973=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004035=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002653=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001193=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005387=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002885=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003918=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004856=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005217=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002336=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004458=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004722=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000869=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000841=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005786=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003160=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007382=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006980=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000341=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001876=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000921=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002287=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000150=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003962=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005130=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000071=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005399=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001784=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007115=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003593=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002590=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003541=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004792=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005249=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb000000893=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004859=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005394=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007724=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003194=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008330=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003685=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007547=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007303=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005374=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004649=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006233=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001989=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004641=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003970=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001599=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001770=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002003=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005971=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002002=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000292=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000967=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006522=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000007235=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007124=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004123=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000337=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000679=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005459=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006948=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004209=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003312=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006319=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006431=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002846=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002119=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003734=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002454=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000911=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000558=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001291=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005324=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006894=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002560=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004482=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008267=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000008381=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001620=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004115=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004908=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005381=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003786=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001280=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002006=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006009=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003318=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007573=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004303=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002046=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007897=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003077=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001841=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007586=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000499=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005777=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007714=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002589=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004073=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007782=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002621=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006500=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004315=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000449=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001733=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002755=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002793=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003957=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008032=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003150=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007413=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003691=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004344=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004518=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003479=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007973=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007280=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000948=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005277=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004083=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001664=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004557=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007229=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002815=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004246=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004984=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007131=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002075=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004918=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008390=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007498=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000847=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005810=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004661=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007370=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006751=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001077=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006769=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002834=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006000=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001667=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002068=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004071=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006762=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006295=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004930=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000005446=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004074=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006232=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004900=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007671=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005602=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006147=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005173=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001144=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002841=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003155=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007699=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000382=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007864=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001673=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006065=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004085=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005295=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005014=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002195=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003038=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002954=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003626=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001116=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000144=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001518=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008031=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008289=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var cbb0000006029=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007788=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008309=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000362=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001940=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005556=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005858=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001742=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003650=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000080=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001038=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004241=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004459=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008389=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000021=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003889=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000635=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008265=duei.Cf.bind(duei).call.bind(duei.Cf.bind(duei),duei.Cf.bind(duei));var 
cbb0000008449=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002757=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002043=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005573=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007288=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006659=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005142=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001538=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008214=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003435=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004558=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006028=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002259=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005085=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005731=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004498=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000765=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007126=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002441=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000101=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003609=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003344=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005247=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006865=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006325=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002451=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005537=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001054=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002618=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003268=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006529=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001360=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001420=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002074=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002656=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003462=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000203=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008010=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004370=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000248=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004909=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006385=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003405=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006661=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007065=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000845=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007427=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002458=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000721=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007256=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000363=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002572=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008088=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003291=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000104=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001105=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007443=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000301=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006750=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006869=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003044=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002679=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001465=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001253=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000173=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000945=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001647=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003166=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007190=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000002982=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001304=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003504=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001825=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000842=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008297=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008240=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001689=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008392=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000249=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002246=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006221=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003117=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004508=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007261=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006678=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001190=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006817=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002848=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002171=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002197=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005695=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006178=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001391=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000289=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006332=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004820=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005398=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000307=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001436=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008055=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002683=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005615=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005946=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002208=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005950=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002221=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008142=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005354=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004427=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004285=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006886=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005070=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005696=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003841=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000240=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000002090=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006639=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001254=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000243=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007529=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008225=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007054=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006038=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003434=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003699=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000474=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003237=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002695=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003440=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003033=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001058=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005227=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006107=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004879=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002689=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007194=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001392=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002686=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000001702=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008102=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004807=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001433=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001886=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008323=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005743=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001099=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004298=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003853=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000295=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005375=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000950=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001854=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004642=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006966=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001546=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003903=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003555=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005833=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007319=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008358=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003349=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004304=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006248=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007352=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002347=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008403=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007918=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003040=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003173=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003682=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007709=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002356=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004338=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006381=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002240=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004469=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002330=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001927=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000256=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008308=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001382=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003580=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007322=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007551=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006822=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003478=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002587=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001483=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001853=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004757=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005100=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006459=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003536=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000206=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002222=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000612=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000217=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008288=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000889=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006900=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001833=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007761=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004555=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006051=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006825=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004572=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004147=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005160=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002682=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000682=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000380=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000364=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006772=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001240=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005532=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002840=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002733=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008038=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000460=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003836=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004167=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005619=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004280=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004810=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003448=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001805=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004426=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004929=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005236=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008368=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000006819=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002281=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006329=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005940=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005718=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006433=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001148=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000788=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003729=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006884=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002381=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001367=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004758=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008084=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005407=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002177=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000394=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007150=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003400=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004411=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007939=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb000000164=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002276=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb000000879=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002635=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000588=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003235=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003372=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004449=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006601=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003211=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001866=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004944=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000016=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000618=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003394=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006446=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006532=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003347=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002670=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004662=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003414=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006292=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002700=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003922=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002149=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000004369=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005687=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001476=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006135=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002060=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007028=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003919=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003991=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005214=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003586=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004897=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000424=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002724=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003660=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002236=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001885=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005935=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006707=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004160=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001496=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000917=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000270=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000041=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000006286=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004077=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007790=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004232=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007267=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001272=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002416=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004920=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004193=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007224=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002453=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000319=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008417=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004927=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006612=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004666=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003487=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004048=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000008419=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008269=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001731=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002903=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000005168=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007971=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004574=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006405=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000142=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007866=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006445=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002120=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007265=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001080=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001414=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005572=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007810=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003751=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005841=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006906=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006892=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005768=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004928=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001387=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008186=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006423=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005479=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001377=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000007436=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004201=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005873=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004753=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005530=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002086=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008156=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007842=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004822=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003548=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006180=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005498=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000008412=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001048=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008333=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007440=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000129=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003914=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006173=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002209=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001834=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005133=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006520=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000007029=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001181=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002859=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006828=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003755=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004788=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001553=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb00000023=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000201=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003701=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004158=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000748=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004527=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008348=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002168=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004790=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004174=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb00000035=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001206=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000306=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002271=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006607=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003654=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var 
cbb0000001356=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004932=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001322=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004177=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003564=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002196=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003388=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007228=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002917=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006949=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb000000188=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007531=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006335=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004485=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001992=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004891=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb00000018=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000002879=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004556=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000007222=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000002612=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000007661=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008076=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var 
cbb0000004333=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004953=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005493=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004488=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000002122=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003636=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004415=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003522=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006686=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000006753=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007757=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004090=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000339=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004540=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001221=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000355=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004793=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005239=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005893=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008416=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004982=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001747=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004512=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000005467=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000001044=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004805=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000004054=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001986=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001139=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006287=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004452=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000001327=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006965=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005814=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001865=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008192=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000006820=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000005637=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003731=duei.cF.bind(duei).call.bind(duei.cF.bind(duei),duei.cF.bind(duei));var cbb0000003566=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000008286=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000003833=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000004796=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007080=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000826=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007375=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var 
cbb0000001666=duei.cf.bind(duei).call.bind(duei.cf.bind(duei),duei.cf.bind(duei));var cbb0000003642=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000007459=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005346=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000005476=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000006727=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000444=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb000000935=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000004602=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));var cbb0000001988=duei.sf.bind(duei).call.bind(duei.sf.bind(duei),duei.sf.bind(duei));;while(!![]){let s_cbb=shuz[start++];if([null,oo4821,oo1,oo4817,oo4818,oo4,oo4809,oo4815,oo7,oo4810,oo9,oo4811,oo11,oo4785,oo4797,oo14,oo4793,oo16,oo4794,oo18,oo4786,oo4791,oo21,oo4787,oo23,oo4788,oo25,oo4730,oo4777,oo28,oo4764,oo30,oo4765,oo32,oo4750,oo4755,oo35,oo4751,oo37,oo4752,oo39,oo4731,oo4747,oo42,oo4742,oo44,oo4743,oo46,oo4732,oo4738,oo49,oo4733,oo51,oo4734,oo53,oo4630,oo4721,oo56,oo4718,oo4719,oo59,oo4688,oo4716,oo62,oo4689,oo64,oo4690,oo4691,oo4661,oo4675,oo69,oo4670,oo71,oo4671,oo73,oo4662,oo4668,oo76,oo4663,oo78,oo4664,oo80,oo4631,oo4659,oo83,oo4654,oo85,oo4655,oo87,oo4647,oo4652,oo90,oo4648,oo92,oo4649,oo94,oo4632,oo4645,oo97,oo4641,oo99,oo4642,oo101,oo4633,oo4639,oo104,oo4634,oo106,oo4635,oo108,oo4466,oo4621,oo111,oo4617,oo4618,oo114,oo4609,oo4615,oo117,oo4610,oo119,oo4611,oo121,oo4591,oo4604,oo124,oo4600,oo126,oo4601,oo128,oo4592,oo4598,oo131,oo4593,oo133,oo4594,oo135,oo4553,oo4589,oo138,oo4576,oo140,oo4577,oo142,oo4569,oo4574,oo145,oo4570,oo4573,oo4571,oo149,oo4554,oo4566,oo152,oo4562,oo154,oo4563,oo156,oo4555,oo4560,oo159,oo4556,oo161,oo4557,oo163,oo4467,oo4550,oo166,oo4546,oo168,oo4547,oo170,oo4528,oo4544,oo17
3,oo4529,oo175,oo4530,oo177,oo4509,oo4526,oo180,oo4520,oo182,oo4521,oo184,oo4510,oo4518,oo187,oo4511,oo189,oo4512,oo191,oo4468,oo4506,oo194,oo4490,oo196,oo4491,oo198,oo4483,oo4488,oo201,oo4484,oo203,oo4485,oo205,oo4469,oo4481,oo208,oo4477,oo210,oo4478,oo212,oo4470,oo4475,oo215,oo4471,oo217,oo4472,oo219,oo4064,oo4464,oo222,oo4452,oo4453,oo225,oo4442,oo4449,oo228,oo4443,oo230,oo4444,oo4445,oo4414,oo4429,oo235,oo4422,oo237,oo4423,oo239,oo4415,oo4420,oo242,oo4416,oo244,oo4417,oo246,oo4342,oo4412,oo249,oo4408,oo251,oo4409,oo253,oo4397,oo4403,oo256,oo4398,oo258,oo4399,oo260,oo4343,oo4384,oo263,oo4371,oo265,oo4372,oo267,oo4344,oo4360,oo270,oo4345,oo272,oo4346,oo274,oo4235,oo4340,oo277,oo4330,oo4331,oo280,oo4310,oo4328,oo283,oo4311,oo285,oo4312,oo287,oo4284,oo4308,oo290,oo4293,oo292,oo4294,oo294,oo4285,oo4290,oo297,oo4286,oo299,oo4287,oo301,oo4236,oo4282,oo304,oo4269,oo306,oo4270,oo308,oo4262,oo4267,oo311,oo4263,oo313,oo4264,oo315,oo4237,oo4253,oo318,oo4248,oo320,oo4249,oo322,oo4238,oo4244,oo325,oo4239,oo327,oo4240,oo329,oo4065,oo4233,oo332,oo4228,oo4229,oo335,oo4220,oo4225,oo338,oo4221,oo340,oo4222,oo342,oo4189,oo4218,oo345,oo4214,oo347,oo4215,oo349,oo4190,oo4195,oo352,oo4191,oo354,oo4192,oo356,oo4142,oo4187,oo359,oo4181,oo361,oo4182,oo363,oo4172,oo4179,oo366,oo4173,oo368,oo4174,oo370,oo4143,oo4170,oo373,oo4157,oo375,oo4158,oo377,oo4144,oo4155,oo4156,oo4145,oo382,oo4146,oo384,oo4066,oo4140,oo387,oo4125,oo389,oo4126,oo391,oo4116,oo4121,oo394,oo4117,oo396,oo4118,oo398,oo4098,oo4114,oo401,oo4109,oo403,oo4110,oo405,oo4099,oo4106,oo408,oo4100,oo410,oo4101,oo412,oo4067,oo4096,oo415,oo4092,oo417,oo4093,oo419,oo4085,oo4090,oo422,oo4086,oo424,oo4087,oo426,oo4068,oo4080,oo429,oo4076,oo431,oo4077,oo433,oo4069,oo4074,oo436,oo4070,oo438,oo4071,oo440,oo3364,oo4062,oo443,oo4059,oo4060,oo446,oo4051,oo4057,oo449,oo4052,oo451,oo4053,oo453,oo4024,oo4049,oo456,oo4044,oo458,oo4045,oo460,oo4025,oo4041,oo463,oo4026,oo465,oo4027,oo467,oo3985,oo4022,oo470,oo4018,oo472,oo4019,oo474,oo4001,oo4010,oo
477,oo4002,oo479,oo4003,oo481,oo3986,oo3998,oo484,oo3994,oo486,oo3995,oo488,oo3987,oo3992,oo491,oo3988,oo493,oo3989,oo495,oo3899,oo3983,oo498,oo3979,oo3980,oo501,oo3966,oo3977,oo504,oo3967,oo506,oo3968,oo508,oo3934,oo3964,oo511,oo3960,oo513,oo3961,oo515,oo3935,oo3958,oo518,oo3936,oo520,oo3937,oo522,oo3900,oo3929,oo525,oo3925,oo527,oo3926,oo529,oo3917,oo3923,oo532,oo3918,oo534,oo3919,oo536,oo3901,oo3915,oo539,oo3910,oo541,oo3911,oo543,oo3902,oo3908,oo546,oo3903,oo548,oo3904,oo550,oo3738,oo3896,oo553,oo3881,oo3882,oo556,oo3865,oo3877,oo559,oo3866,oo561,oo3867,oo563,oo3850,oo3863,oo566,oo3859,oo568,oo3860,oo570,oo3851,oo3857,oo573,oo3852,oo575,oo3853,oo577,oo3809,oo3848,oo580,oo3844,oo582,oo3845,oo584,oo3825,oo3831,oo587,oo3826,oo589,oo3827,oo591,oo3810,oo3823,oo594,oo3819,oo596,oo3820,oo598,oo3811,oo3816,oo601,oo3812,oo603,oo3813,oo605,oo3739,oo3807,oo608,oo3802,oo610,oo3803,oo612,oo3791,oo3800,oo615,oo3792,oo617,oo3793,oo619,oo3773,oo3788,oo622,oo3784,oo624,oo3785,oo626,oo3774,oo3779,oo629,oo3775,oo631,oo3776,oo633,oo3740,oo3770,oo636,oo3764,oo638,oo3765,oo640,oo3755,oo3761,oo643,oo3756,oo645,oo3757,oo647,oo3741,oo3753,oo650,oo3749,oo652,oo3750,oo654,oo3742,oo3747,oo657,oo3743,oo659,oo3744,oo661,oo3365,oo3736,oo664,oo3733,oo3734,oo667,oo3724,oo3730,oo670,oo3725,oo672,oo3726,oo674,oo3705,oo3722,oo677,oo3716,oo679,oo3717,oo681,oo3706,oo3711,oo684,oo3707,oo686,oo3708,oo688,oo3670,oo3703,oo691,oo3693,oo693,oo3694,oo695,oo3686,oo3691,oo698,oo3687,oo700,oo3688,oo702,oo3671,oo3683,oo705,oo3679,oo707,oo3680,oo709,oo3672,oo3677,oo712,oo3673,oo714,oo3674,oo716,oo3551,oo3668,oo719,oo3659,oo721,oo3660,oo3661,oo3642,oo3657,oo726,oo3643,oo728,oo3644,oo730,oo3623,oo3638,oo3639,oo3631,oo735,oo3632,oo737,oo3624,oo3629,oo740,oo3625,oo742,oo3626,oo744,oo3552,oo3615,oo747,oo3608,oo749,oo3609,oo751,oo3584,oo3606,oo754,oo3585,oo756,oo3586,oo758,oo3553,oo3582,oo761,oo3562,oo763,oo3563,oo765,oo3554,oo3559,oo768,oo3555,oo770,oo3556,oo772,oo3366,oo3549,oo775,oo3546,oo3547,oo778,oo3512,oo3537,
oo781,oo3513,oo783,oo3514,oo785,oo3498,oo3510,oo788,oo3506,oo790,oo3507,oo792,oo3499,oo3504,oo795,oo3500,oo797,oo3501,oo799,oo3459,oo3496,oo802,oo3492,oo804,oo3493,oo806,oo3476,oo3490,oo809,oo3477,oo811,oo3478,oo813,oo3460,oo3474,oo816,oo3469,oo818,oo3470,oo820,oo3461,oo3467,oo823,oo3462,oo825,oo3463,oo827,oo3367,oo3457,oo830,oo3453,oo832,oo3454,oo834,oo3446,oo3451,oo837,oo3447,oo839,oo3448,oo841,oo3420,oo3442,oo844,oo3438,oo846,oo3439,oo848,oo3421,oo3436,oo851,oo3422,oo853,oo3423,oo855,oo3368,oo3416,oo858,oo3411,oo860,oo3412,oo862,oo3395,oo3403,oo865,oo3396,oo867,oo3397,oo869,oo3369,oo3393,oo872,oo3389,oo874,oo3390,oo876,oo3370,oo3386,oo879,oo3371,oo881,oo3372,oo883,oo1771,oo3362,oo886,oo3359,oo3360,oo889,oo3332,oo3346,oo892,oo3333,oo894,oo3334,oo896,oo3302,oo3330,oo899,oo3320,oo901,oo3321,oo903,oo3303,oo3317,oo906,oo3304,oo908,oo3305,oo910,oo3247,oo3289,oo913,oo3283,oo915,oo3284,oo917,oo3274,oo3281,oo920,oo3275,oo922,oo3276,oo924,oo3248,oo3263,oo927,oo3258,oo929,oo3259,oo931,oo3249,oo3255,oo934,oo3250,oo936,oo3251,oo938,oo3176,oo3244,oo941,oo3241,oo3242,oo944,oo3233,oo3239,oo947,oo3234,oo949,oo3235,oo951,oo3219,oo3231,oo954,oo3227,oo956,oo3228,oo958,oo3220,oo3225,oo961,oo3221,oo963,oo3222,oo965,oo3177,oo3217,oo968,oo3213,oo970,oo3214,oo972,oo3205,oo3210,oo975,oo3206,oo977,oo3207,oo979,oo3178,oo3197,oo982,oo3193,oo984,oo3194,oo986,oo3179,oo3190,oo989,oo3180,oo991,oo3181,oo993,oo2956,oo3167,oo996,oo3163,oo3164,oo999,oo3146,oo3160,oo1002,oo3147,oo1004,oo3148,oo1006,oo3112,oo3135,oo1009,oo3120,oo1011,oo3121,oo1013,oo3113,oo3118,oo1016,oo3114,oo1018,oo3115,oo3116,oo3074,oo3110,oo1023,oo3097,oo1025,oo3098,oo1027,oo3090,oo3095,oo1030,oo3091,oo1032,oo3092,oo1034,oo3075,oo3088,oo1037,oo3084,oo1039,oo3085,oo1041,oo3076,oo3082,oo1044,oo3077,oo1046,oo3078,oo1048,oo2957,oo3070,oo1051,oo3064,oo1053,oo3065,oo1055,oo3049,oo3061,oo1058,oo3050,oo1060,oo3051,oo1062,oo3014,oo3036,oo1065,oo3031,oo1067,oo3032,oo1069,oo3015,oo3021,oo1072,oo3016,oo1074,oo3017,oo1076,oo2958,oo3011,oo1079,
oo3006,oo1081,oo3007,oo1083,oo2992,oo2997,oo2998,oo2993,oo1088,oo2994,oo1090,oo2959,oo2989,oo1093,oo2985,oo1095,oo2986,oo1097,oo2960,oo2972,oo1100,oo2961,oo1102,oo2962,oo1104,oo2543,oo2954,oo1107,oo2951,oo2952,oo1110,oo2937,oo2942,oo1113,oo2938,oo1115,oo2939,oo1117,oo2921,oo2935,oo1120,oo2930,oo1122,oo2931,oo1124,oo2922,oo2928,oo1127,oo2923,oo1129,oo2924,oo1131,oo2867,oo2918,oo1134,oo2913,oo2917,oo2914,oo1138,oo2898,oo2904,oo1141,oo2899,oo1143,oo2900,oo1145,oo2868,oo2896,oo1148,oo2885,oo1150,oo2886,oo1152,oo2869,oo2874,oo1155,oo2870,oo1157,oo2871,oo1159,oo2779,oo2863,oo1162,oo2860,oo2861,oo1165,oo2852,oo2858,oo1168,oo2853,oo1170,oo2854,oo1172,oo2821,oo2850,oo1175,oo2842,oo1177,oo2843,oo1179,oo2822,oo2837,oo1182,oo2823,oo1184,oo2824,oo1186,oo2780,oo2818,oo1189,oo2814,oo1191,oo2815,oo1193,oo2806,oo2812,oo1196,oo2807,oo1198,oo2808,oo1200,oo2781,oo2804,oo1203,oo2800,oo1205,oo2801,oo1207,oo2782,oo2798,oo1210,oo2783,oo1212,oo2784,oo1214,oo2544,oo2765,oo1217,oo2762,oo2763,oo1220,oo2755,oo2760,oo1223,oo2756,oo1225,oo2757,oo1227,oo2731,oo2752,oo1230,oo2746,oo1232,oo2747,oo1234,oo2732,oo2743,oo1237,oo2733,oo1239,oo2734,oo1241,oo2674,oo2727,oo1244,oo2722,oo1246,oo2723,oo1248,oo2715,oo2720,oo1251,oo2716,oo1253,oo2717,oo1255,oo2675,oo2713,oo1258,oo2709,oo1260,oo2710,oo1262,oo2676,oo2699,oo1265,oo2677,oo1267,oo2678,oo1269,oo2545,oo2668,oo1272,oo2656,oo1274,oo2657,oo1276,oo2629,oo2645,oo1279,oo2630,oo1281,oo2631,oo1283,oo2606,oo2627,oo1286,oo2620,oo1288,oo2621,oo1290,oo2607,oo2618,oo1293,oo2608,oo1295,oo2609,oo1297,oo2546,oo2604,oo1300,oo2588,oo1302,oo2589,oo1304,oo2579,oo2585,oo1307,oo2580,oo1309,oo2581,oo1311,oo2547,oo2576,oo1314,oo2571,oo1316,oo2572,oo1318,oo2548,oo2569,oo1321,oo2549,oo1323,oo2550,oo1325,oo1772,oo2541,oo1328,oo2537,oo2538,oo1331,oo2529,oo2534,oo1334,oo2530,oo1336,oo2531,oo1338,oo2506,oo2527,oo1341,oo2515,oo1343,oo2516,oo1345,oo2507,oo2512,oo1348,oo2508,oo1350,oo2509,oo1352,oo2465,oo2504,oo1355,oo2500,oo1357,oo2501,oo1359,oo2489,oo2498,oo1362,oo2490,oo1364,oo249
1,oo1366,oo2466,oo2481,oo1369,oo2476,oo1371,oo2477,oo1373,oo2467,oo2472,oo1376,oo2468,oo1378,oo2469,oo1380,oo2375,oo2463,oo1383,oo2460,oo2461,oo1386,oo2449,oo2457,oo1389,oo2450,oo1391,oo2451,oo1393,oo2418,oo2447,oo1396,oo2443,oo1398,oo2444,oo1400,oo2419,oo2440,oo1403,oo2420,oo1405,oo2421,oo1407,oo2376,oo2416,oo1410,oo2411,oo1412,oo2412,oo1414,oo2397,oo2409,oo1417,oo2398,oo1419,oo2399,oo1421,oo2377,oo2395,oo1424,oo2391,oo1426,oo2392,oo1428,oo2378,oo2389,oo1431,oo2379,oo1433,oo2380,oo1435,oo2180,oo2373,oo1438,oo2370,oo2371,oo1441,oo2362,oo2368,oo1444,oo2363,oo1446,oo2364,oo1448,oo2324,oo2356,oo1451,oo2352,oo1453,oo2353,oo1455,oo2325,oo2350,oo1458,oo2326,oo1460,oo2327,oo1462,oo2285,oo2322,oo1465,oo2318,oo1467,oo2319,oo1469,oo2308,oo2313,oo1472,oo2309,oo1474,oo2310,oo1476,oo2286,oo2305,oo1479,oo2301,oo1481,oo2302,oo1483,oo2287,oo2299,oo1486,oo2288,oo1488,oo2289,oo1490,oo2181,oo2283,oo1493,oo2272,oo1495,oo2273,oo1497,oo2258,oo2270,oo1500,oo2259,oo1502,oo2260,oo1504,oo2233,oo2255,oo1507,oo2250,oo1509,oo2251,oo1511,oo2234,oo2239,oo1514,oo2235,oo1516,oo2236,oo1518,oo2182,oo2231,oo1521,oo2227,oo1523,oo2228,oo1525,oo2213,oo2219,oo1528,oo2214,oo1530,oo2215,oo1532,oo2183,oo2210,oo1535,oo2197,oo2209,oo2198,oo1539,oo2184,oo2195,oo1542,oo2185,oo1544,oo2186,oo1546,oo1773,oo2177,oo1549,oo2173,oo2174,oo1552,oo2151,oo2158,oo1555,oo2152,oo1557,oo2153,oo1559,oo2129,oo2142,oo1562,oo2137,oo1564,oo2138,oo1566,oo2130,oo2135,oo1569,oo2131,oo1571,oo2132,oo1573,oo2089,oo2119,oo1576,oo2115,oo1578,oo2116,oo1580,oo2106,oo2111,oo1583,oo2107,oo1585,oo2108,oo1587,oo2090,oo2102,oo1590,oo2098,oo1592,oo2099,oo1594,oo2091,oo2096,oo1597,oo2092,oo1599,oo2093,oo1601,oo1975,oo2083,oo1604,oo2079,oo1606,oo2080,oo1608,oo2071,oo2077,oo1611,oo2072,oo1613,oo2073,oo1615,oo2034,oo2065,oo1618,oo2060,oo1620,oo2061,oo1622,oo2035,oo2055,oo1625,oo2036,oo1627,oo2037,oo1629,oo1976,oo2032,oo1632,oo2027,oo1634,oo2028,oo1636,oo2007,oo2025,oo1639,oo2008,oo1641,oo2009,oo1643,oo1977,oo2005,oo1646,oo1987,oo1648,oo1988,oo1650,oo1
978,oo1984,oo1653,oo1979,oo1655,oo1980,oo1657,oo1774,oo1972,oo1660,oo1969,oo1970,oo1663,oo1962,oo1967,oo1666,oo1963,oo1966,oo1964,oo1670,oo1922,oo1950,oo1673,oo1935,oo1675,oo1936,oo1677,oo1923,oo1932,oo1680,oo1924,oo1682,oo1925,oo1684,oo1860,oo1920,oo1687,oo1910,oo1689,oo1911,oo1691,oo1893,oo1907,oo1694,oo1894,oo1696,oo1895,oo1698,oo1861,oo1890,oo1701,oo1869,oo1703,oo1870,oo1705,oo1862,oo1867,oo1708,oo1863,oo1710,oo1864,oo1712,oo1775,oo1858,oo1715,oo1854,oo1717,oo1855,oo1719,oo1846,oo1851,oo1722,oo1847,oo1724,oo1848,oo1726,oo1812,oo1841,oo1729,oo1836,oo1731,oo1837,oo1733,oo1813,oo1825,oo1736,oo1814,oo1738,oo1815,oo1740,oo1776,oo1810,oo1743,oo1799,oo1745,oo1800,oo1747,oo1791,oo1796,oo1750,oo1792,oo1752,oo1793,oo1754,oo1777,oo1789,oo1757,oo1785,oo1759,oo1786,oo1761,oo1778,oo1783,oo1764,oo1779,oo1766,oo1780,oo1768,oo1769].indexOf(s_cbb)==-oo4828){return"-90_cbb";}else if(s_cbb<oo1771){if(s_cbb<oo1772){if(s_cbb<oo1773){if(s_cbb<oo1774){if(s_cbb<oo1775){if(s_cbb<oo1776){s_cbb<oo1777?s_cbb<oo1778?s_cbb<oo1779?s_cbb<oo1780?s_cbb<oo1781?(a1=cbb0000006441(a3),a2=cbb0000006442(a4),a1=a1^a2,a5=cbb0000006443(a1),a1=cbb0000006444(a2),a2=cbb0000006445(a1),a1=a1/a2,a3=cbb0000006446(a1)):(a5=cbb0000003799(a3),a1=cbb0000003800(typeof a5),a1=cbb0000003801(a3),a2=cbb0000003802(a2),a1=a1<<a2,a1=cbb0000003803(a1)):s_cbb<oo1782?(a1=cbb0000006777(a5),a2=cbb0000006778(a6),a1=a2*a1,a7=cbb0000006779(a1),a1=cbb0000006780(a6),a2=cbb0000006781(a1),a1=a1>>>a2,a2=cbb0000006782(a1)):(a5=cbb0000007965(a1),a2=cbb0000007966(void 
a5),a1=shuz[start++],a2=cbb0000007967(a1)):s_cbb<oo1783?s_cbb<oo1784?(function(){a1=cbb0000001675(a2);a2=cbb0000001676(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001677(a1);}(),a1=cbb0000001678(a5),a2=cbb0000001679(a6),a1=a2!==a1,a1=cbb0000001680(a1)):(a1=cbb0000003058(a5),a2=cbb0000003059(a6),a1=a2!==a1,a1=cbb0000003060(a1),a1=cbb0000003061(cbbb),a2=cbb0000003062(a1),a1=a1<=a2,a5=cbb0000003063(a1)):(a1=cbb0000007734(a3),a2=cbb0000007735(a4),a1=a1%a2,a5=cbb0000007736(a1),a1=cbb0000007737(a4),a2=cbb0000007738(a5),a1=a1-a2,a6=cbb0000007739(a1)):s_cbb<oo1785?s_cbb<oo1786?s_cbb<oo1787?(a1=shuz[start++],a2=cbb0000007941(a1),a1=cbb0000007942(a2),a2=cbb0000007943(a1),a1=a2==a1,a4=cbb0000007944(a1)):(a1=cbb0000003196(a2),a2=cbb0000003197(a1),a1=a2===a1,a3=cbb0000003198(a1),a5=cbb0000003199(a3),a4=cbb0000003200(!a5)):s_cbb<oo1788?(a5=cbb0000004562(a3),a1=cbb0000004563(~a5),a1=cbb0000004564(a2),a2=cbb0000004565(a1),a1=a2==a1,a4=cbb0000004566(a1)):(a1=cbb0000004670(a3),a2=cbb0000004671(a1),a2.push(a1),a1=cbb0000004672(a2),a1=cbb0000004673(a3),a2=cbb0000004674(a1),a1=a1<a2,a1=cbb0000004675(a1)):s_cbb<oo1789?s_cbb<oo1790?(a5=cbb0000004460(a3),a1=cbb0000004461(~a5),a1=cbb0000004462(a2),a2=cbb0000004463(a1),a1=a2===a1,a3=cbb0000004464(a1)):(a1=cbb0000004707(a3),a2=cbb0000004708(a1),a2.push(a1),a1=cbb0000004709(a2),a1=cbb0000004710(a4),a2=cbb0000004711(a5),a1=a1-a2,a6=cbb0000004712(a1)):(a1=cbb0000007719(a2),a2=cbb0000007720(a1),a1=a1+a2,a1=cbb0000007721(a1),a2=cbb0000007722([])):s_cbb<oo1791?s_cbb<oo1792?s_cbb<oo1793?s_cbb<oo1794?(a5=cbb0000004440(a1),a2=cbb0000004441(void 
a5),a1=cbb0000004442(a2),a2=cbb0000004443(a3),a3=cbb0000004444(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004445(a3)):(a2=cbb0000003998(allthis),a1=cbb0000003999(a2),a2=cbb0000004000(a1),a1=a1/a2,a3=cbb0000004001(a1)):s_cbb<oo1795?(a1=cbb0000006794(a3),a2=cbb0000006795(a3),a1=a1>a2,a2=cbb0000006796(a1),a1=cbb0000006797(a5),a2=cbb0000006798(a6),a1=a2*a1,a7=cbb0000006799(a1)):(a1=cbb0000001617(a3),a2=cbb0000001618(a1),a1=a1<a2,a1=cbb0000001619(a1),function(){a1=cbb0000001620(a2);a2=cbb0000001621(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001622(a1);}()):s_cbb<oo1796?s_cbb<oo1797?(a1=cbb0000001385(a6),a2=cbb0000001386(a7),a1=a1|a2,a8=cbb0000001387(a1),a1=cbb0000001388(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820):(a1=cbb0000005601(a6),a2=cbb0000005602(a1),a1=a1>>>a2,a2=cbb0000005603(a1),a2=cbb0000005604(allthis)):(a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008135(a1),a1=cbb0000008136(a5),a2=cbb0000008137(a6),a1=a2*a1,a7=cbb0000008138(a1)):s_cbb<oo1799?s_cbb<oo1800?s_cbb<oo1801?(a1=cbb0000004749(a2),a2=cbb0000004750(a3),a1=a1&a2,a3=cbb0000004751(a1),a1=cbb0000004752(a3),a2=cbb0000004753(a1),a2.push(a1),a1=cbb0000004754(a2)):(all=cbbb,a1=cbb0000007338(cbbb),a5=cbb0000007339(a1),a2=cbb0000007340(void a5)):s_cbb<oo1802?(a1=shuz[start++],a2=cbb0000007904(a1),a1=cbb0000007905(a3),a2=cbb0000007906(a2),a1=a1<<a2,a1=cbb0000007907(a1)):(a1=cbb0000004226(a5),a2=cbb0000004227(a6),a1=a2!==a1,a1=cbb0000004228(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004229(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004230(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new 
a3(...args);offnew=oo4806;a1=cbb0000004231(a4);}()):s_cbb<oo1810?s_cbb<oo1811?(a1=cbb0000004776(a2),a2=cbb0000004777(a1),a1=a2==a1,a4=cbb0000004778(a1),a1=cbb0000004779(a5),a2=cbb0000004780(a6),a1=a2*a1,a7=cbb0000004781(a1)):(a1=cbb0000007764({}),a1=cbb0000007765(a4),a2=cbb0000007766(a5),a1=a1-a2,a6=cbb0000007767(a1)):(a1=shuz[start++],a2=cbb0000007490(a3),a3=cbb0000007491(a1),a2[constantPool[a1]]=a3,function(){debugger;}());}else if(s_cbb<oo1812){if(s_cbb<oo1813){if(s_cbb<oo1814){if(s_cbb<oo1815){if(s_cbb<oo1816){a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}a1=cbb0000001258(a5);a2=cbb0000001259(a6);a1=a2*a1;a7=cbb0000001260(a1);}else{(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008278(a2);cbbb[a9]=argsList[a2];}})();a1=cbb0000008279(a2);a2=cbb0000008280(a1);a1=a1/a2;a3=cbb0000008281(a1);}}else s_cbb<oo1824?(a1=cbb0000002366(a5),a2=cbb0000002367(a6),a1=a2!==a1,a1=cbb0000002368(a1),a5=cbb0000002369(a3),a4=cbb0000002370(!a5)):(a1=cbb0000007345(a3),a2=cbb0000007346(a4),a1=a1>>a2,a5=cbb0000007347(a1),all=cbbb,a1=cbb0000007348(cbbb));}else 
s_cbb<oo1825?s_cbb<oo1826?(a1=cbb0000003925(a6),a2=cbb0000003926(a7),a1=a1|a2,a8=cbb0000003927(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003928()):a4.splice(oo4806,oo4806,cbb0000003929());}a1=cbb0000003930(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):(a1=cbb0000004357(a2),a2=cbb0000004358(a3),a3=cbb0000004359(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004360(a3),a1=cbb0000004361(a6),a2=cbb0000004362(a7),a1=a1|a2,a8=cbb0000004363(a1)):(a1=cbb0000003140(cbbb),a2=cbb0000003141(a1),a1=a1<=a2,a5=cbb0000003142(a1),a1=cbb0000003143(a2),a2=cbb0000003144(a1),a1=a2===a1,a3=cbb0000003145(a1));}else s_cbb<oo1836?s_cbb<oo1837?s_cbb<oo1838?(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008274(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008275(a3),a2=cbb0000008276(a1),a1=a1<a2,a1=cbb0000008277(a1)):(a1=cbb000000537(a5),a2=cbb000000538(a3),a3=cbb000000539(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb000000540(a6),a2=cbb000000541(a7),a1=a1|a2,a8=cbb000000542(a1)):s_cbb<oo1840?(a1=cbb0000005930(a6),a2=cbb0000005931(a7),a1=a1|a2,a8=cbb0000005932(a1),a1=cbb0000005933(a6),a2=cbb0000005934(a1),a1=a1>>>a2,a2=cbb0000005935(a1)):(a1=cbb0000008150(),a2=cbb0000008151(),a1=a2!=a1,cbb0000008152(a1),a1=shuz[start++],a2=shuz[start++],a1=new 
RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008153(a1)):s_cbb<oo1841?s_cbb<oo1842?(a1=cbb0000007624(a2),a2=cbb0000007625(a1),a1=a1+a2,a1=cbb0000007626(a1),a1=cbb0000007627(a5),a2=cbb0000007628(a6),a1=a2!==a1,a1=cbb0000007629(a1)):(a1=cbb000000831(a5),a2=cbb000000832(a6),a1=a2!==a1,a1=cbb000000833(a1),a1=cbb000000834(a5),a2=cbb000000835(a9),a1=a2<a1,a6=cbb000000836(a1)):(a1=cbb0000001758(a3),a2=cbb0000001759(a4),a1=a1>>a2,a5=cbb0000001760(a1),function(){a1=shuz[start++];a3=cbb0000001761(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001762(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001763(a4);}());}else if(s_cbb<oo1846){if(s_cbb<oo1847){s_cbb<oo1848?s_cbb<oo1849?(a1=cbb0000005755(a6),a2=cbb0000005756(a1),a1=a1>>>a2,a2=cbb0000005757(a1),a1=cbb0000005758(a2),a2=cbb0000005759(a1),a1=a2-a1,a1=cbb0000005760(a1)):(a1=cbb000000180({}),a1=cbb000000181(),a2=cbb000000182(),a1=a2!=a1,cbb000000183(a1)):s_cbb<oo1850?(a1=cbb0000005444(a2),a2=cbb0000005445(a3),a1=a1&a2,a3=cbb0000005446(a1),a1=cbb0000005447(a2),a2=cbb0000005448(a3),a1=a1&a2,a3=cbb0000005449(a1)):(a1=cbb0000008203(a6),a2=cbb0000008204(a7),a1=a1|a2,a8=cbb0000008205(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008206(a1));}else if(s_cbb<oo1851){if(s_cbb<oo1852){a2=cbb0000004077(allthis);a1=cbb0000004078(a3);a2=cbb0000004079(a1);a1=a1<a2;a1=cbb0000004080(a1);}else{a1=cbb000000759(a5);a2=cbb000000760(a6);a1=a2!==a1;a1=cbb000000761(a1);a1=cbb000000762(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000763(a1)):a9=oo4829;;}}else{a1=cbb0000004932(a6);a2=cbb0000004933(a7);a1=a1|a2;a8=cbb0000004934(a1);a1=cbb0000004935(a3);a2=cbb0000004936(a3);a1=a1>a2;a2=cbb0000004937(a1);}}else 
if(s_cbb<oo1854){s_cbb<oo1855?s_cbb<oo1856?(function(){a1=cbb0000002467(a2);throw a1;}(),a1=cbb0000002468(a3),a2=cbb0000002469(a4),a1=a1>>a2,a5=cbb0000002470(a1)):(a1=cbb0000007758(a4),a2=cbb0000007759(a5),a1=a1-a2,a6=cbb0000007760(a1),a1=cbb0000007761(a5),a2=cbb0000007762(a9),a1=a2<a1,a6=cbb0000007763(a1)):s_cbb<oo1857?(a1=cbb0000004014(a4),a2=cbb0000004015(a1),a1=a1>=a2,a2=cbb0000004016(a1),a2=cbb0000004017(allthis)):(a1=cbb0000008139(a2),a2=cbb0000008140(a3),a1=a1&a2,a3=cbb0000008141(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008142(a1));}else if(s_cbb<oo1858){if(s_cbb<oo1859){a1=cbb0000005918(a3);a2=cbb0000005919(a4);a1=a1>>a2;a5=cbb0000005920(a1);a1=cbb0000005921(a2);a2=cbb0000005922(a1);a1=a2==a1;a4=cbb0000005923(a1);}else{a1=cbb0000001053(a2);a2=cbb0000001054(a1);a1=a1+a2;a1=cbb0000001055(a1);return;}}else{a1=cbb0000002897(a3);a2=cbb0000002898(a1);a3=delete a2[a1];a1=cbb0000002899(a3);a2=cbb0000002900(allthis);}}else if(s_cbb<oo1860){if(s_cbb<oo1861){if(s_cbb<oo1862){s_cbb<oo1863?s_cbb<oo1864?s_cbb<oo1865?(a1=cbb0000006027(a6),a2=cbb0000006028(a7),a1=a1|a2,a8=cbb0000006029(a1),a1=cbb0000006030(a2),a2=cbb0000006031(a1),a1=a1/a2,a3=cbb0000006032(a1)):(a5=cbb0000003763(a3),a1=cbb0000003764(typeof a5),a1=cbb0000003765({})):s_cbb<oo1866?(a1=cbb0000008002(a2),a2=cbb0000008003(a3),a1=a1&a2,a3=cbb0000008004(a1),a1=shuz[start++],a2=cbb0000008005(a1)):(function(){debugger;}(),a1=cbb0000006474(a3),a2=cbb0000006475(a4),a1=a1^a2,a5=cbb0000006476(a1)):s_cbb<oo1867?s_cbb<oo1868?(a1=cbb0000007179(a3),a2=cbb0000007180(a4),a1=a1>>a2,a5=cbb0000007181(a1),a1=cbb0000007182(a3),a2=cbb0000007183(a1),a1=a1<a2,a1=cbb0000007184(a1)):(a1=cbb0000004626(a3),a2=cbb0000004627(a1),a2.push(a1),a1=cbb0000004628(a2),a1=cbb0000004629(a5),a2=cbb0000004630(a6),a1=a2!==a1,a1=cbb0000004631(a1)):(a1=cbb0000002020(a3),a2=cbb0000002021(a4),a1=a1^a2,a5=cbb0000002022(a1),a1=cbb0000002023({}));}else 
if(s_cbb<oo1869){if(s_cbb<oo1870){if(s_cbb<oo1871){a1=cbb000000749(a2);a2=cbb000000750(a1);a1=a2===a1;a3=cbb000000751(a1);a1=cbb000000752(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000753(a1)):a9=oo4829;;}else{a1=cbb0000003118(a2);a2=cbb0000003119(a1);a1=a2===a1;a3=cbb0000003120(a1);a2=cbb0000003121(allthis);}}else s_cbb<oo1873?(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003961()):a4.splice(oo4806,oo4806,cbb0000003962());}a1=cbb0000003963(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a1=cbb0000003964(a2),a2=cbb0000003965(a1),a1=a2==a1,a4=cbb0000003966(a1)):(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004179(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004180(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004181(a4);}(),a1=cbb0000004182(a3),a2=cbb0000004183(a2),a1=a1<<a2,a1=cbb0000004184(a1));}else s_cbb<oo1890?s_cbb<oo1891?(a1=cbb0000001389(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a1=cbb0000001390({})):(a1=cbb0000008103(a3),a2=cbb0000008104(a4),a1=a1^a2,a5=cbb0000008105(a1),a1=shuz[start++],a2=cbb0000008106(constantPool[a1])):(a1=cbb0000006226(a3),a2=cbb0000006227(a2),a1=a1<<a2,a1=cbb0000006228(a1),a1=cbb0000006229(a3),a2=cbb0000006230(a4),a1=a1%a2,a5=cbb0000006231(a1));}else if(s_cbb<oo1893){if(s_cbb<oo1894){s_cbb<oo1895?s_cbb<oo1896?(a1=cbb0000001119(a6),a2=cbb0000001120(a1),a1=a1>>>a2,a2=cbb0000001121(a1),j=cbb0000001122(a1),j2=cbb0000001123(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):(a1=cbb0000002459(a3),a2=cbb0000002460(a2),a1=a1<<a2,a1=cbb0000002461(a1),function(){a1=cbb0000002462(a2);throw 
a1;}()):s_cbb<oo1897?(a1=cbb0000001663(a3),a2=cbb0000001664(a1),a1=a1<a2,a1=cbb0000001665(a1),function(){a1=cbb0000001666(a2);a2=cbb0000001667(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001668(a1);}()):(a1=cbb0000003859(a3),a2=cbb0000003860(a1),a1=a1<a2,a1=cbb0000003861(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003862()):a4.splice(oo4806,oo4806,cbb0000003863());}a1=cbb0000003864(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}());}else if(s_cbb<oo1907){if(s_cbb<oo1908){a1=cbb000000744(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000745(a1)):a9=oo4829;;a1=cbb000000746(a3);a2=cbb000000747(a1);a1=a1<a2;a1=cbb000000748(a1);}else{a1=cbb000000858(a3);a2=cbb000000859(a4);a1=a1%a2;a5=cbb000000860(a1);a1=cbb000000861(a5);a2=cbb000000862(a9);a1=a2<a1;a6=cbb000000863(a1);}}else{(function(){i=[];a2=cbb0000002603(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;})();a1=cbb0000002604(a4);a2=cbb0000002605(a5);a1=a1-a2;a6=cbb0000002606(a1);}}else if(s_cbb<oo1910){if(s_cbb<oo1911){if(s_cbb<oo1912){a1=cbb0000007913(a3);a2=cbb0000007914(a3);a1=a1>a2;a2=cbb0000007915(a1);a1=shuz[start++];a2=cbb0000007916(a1);}else{a1=cbb0000001236(a2);a2=cbb0000001237(a1);a1=a2==a1;a4=cbb0000001238(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return 
a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else s_cbb<oo1919?(a1=cbb0000007538(a5),a2=cbb0000007539(a6),a1=a2*a1,a7=cbb0000007540(a1),a1=shuz[start++],a2=cbb0000007541(a3),a3=cbb0000007542(a1),a2[constantPool[a1]]=a3):(a5=cbb0000002311(a3),a4=cbb0000002312(!a5),a1=cbb0000002313(),a2=cbb0000002314(),a1=a2!=a1,cbb0000002315(a1));}else s_cbb<oo1920?s_cbb<oo1921?(a1=cbb0000003064(a5),a2=cbb0000003065(a6),a1=a2!==a1,a1=cbb0000003066(a1),a1=cbb0000003067(),a2=cbb0000003068(),a1=a2!=a1,cbb0000003069(a1)):(a1=cbb0000005339(a3),a2=cbb0000005340(a1),a1=a1<a2,a1=cbb0000005341(a1),a1=cbb0000005342(cbbb),a2=cbb0000005343(a1),a1=a1<=a2,a5=cbb0000005344(a1)):(a5=cbb0000006513(a1),a2=cbb0000006514(void a5),a1=cbb0000006515(a3),a2=cbb0000006516(a4),a1=a1^a2,a5=cbb0000006517(a1));}else if(s_cbb<oo1922){if(s_cbb<oo1923){s_cbb<oo1924?s_cbb<oo1925?s_cbb<oo1926?(a1=cbb0000005888(a3),a2=cbb0000005889(a4),a1=a1>>a2,a5=cbb0000005890(a1),a1=cbb0000005891(a6),a2=cbb0000005892(a7),a1=a1|a2,a8=cbb0000005893(a1)):(a1=cbb0000008321(a3),a2=cbb0000008322(a2),a1=a1<<a2,a1=cbb0000008323(a1),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008324(a2);cbbb[a9]=argsList[a2];}}()):s_cbb<oo1928?(function(){i=[];a2=cbb0000002545(a2);for(a1 in 
a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002546(a3),a2=cbb0000002547(a4),a1=a1^a2,a5=cbb0000002548(a1)):(function(){a1=shuz[start++];a3=cbb0000001833(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001834(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001835(a4);}(),a1=cbb0000001836(a2),a2=cbb0000001837(a1),a1=a1+a2,a1=cbb0000001838(a1)):s_cbb<oo1932?s_cbb<oo1933?(a1=cbb0000008256({}),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008257(a2);cbbb[a9]=argsList[a2];}}()):(a1=cbb000000510(a2),a2=cbb000000511(a1),a1=a2===a1,a3=cbb000000512(a1),a1=cbb000000513(a5),a2=cbb000000514(a3),a3=cbb000000515(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(a1=cbb0000006121(a3),a2=cbb0000006122(a4),a1=a1%a2,a5=cbb0000006123(a1),a1=cbb0000006124(a3),a2=cbb0000006125(a2),a1=a1<<a2,a1=cbb0000006126(a1));}else if(s_cbb<oo1935){if(s_cbb<oo1936){s_cbb<oo1937?(a1=cbb0000006190(a2),a2=cbb0000006191(a1),a1=a1+a2,a1=cbb0000006192(a1),a1=cbb0000006193(a3),a2=cbb0000006194(a2),a1=a1<<a2,a1=cbb0000006195(a1)):(a1=cbb0000002104({}),a1=cbb0000002105(cbbb),a2=cbb0000002106(a1),a1=a1<=a2,a5=cbb0000002107(a1));}else if(s_cbb<oo1938){a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008367(a1);let g=a7;all[g]=function(){let g2=new 
cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008368(a6);a2=cbb0000008369(a1);a1=a1>>>a2;a2=cbb0000008370(a1);}else{a1=cbb000000233();a2=cbb000000234();a1=a2!=a1;cbb000000235(a1);(function(){debugger;})();}}else s_cbb<oo1950?s_cbb<oo1951?(function(){a1=shuz[start++];a3=cbb0000001746(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001747(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001748(a4);}(),a1=cbb0000001749(a3),a2=cbb0000001750(a3),a1=a1>a2,a2=cbb0000001751(a1)):(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004260(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004261(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new 
a3(...args);offnew=oo4806;a1=cbb0000004262(a4);}(),a1=cbb0000004263(a3),a2=cbb0000004264(a4),a1=a1^a2,a5=cbb0000004265(a1)):(a1=cbb000000608(a4),a2=cbb000000609(a1),a1=a1>=a2,a2=cbb000000610(a1),a1=cbb000000611(a5),a2=cbb000000612(a3),a3=cbb000000613(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1);}else if(s_cbb<oo1962){s_cbb<oo1963?s_cbb<oo1964?s_cbb<oo1965?(a5=cbb0000002361(a3),a4=cbb0000002362(!a5),a1=cbb0000002363(a3),a2=cbb0000002364(a4),a1=a1%a2,a5=cbb0000002365(a1)):(a1=cbb0000006705(a3),a2=cbb0000006706(a4),a1=a1%a2,a5=cbb0000006707(a1),a1=cbb0000006708(a5),a2=cbb0000006709(a6),a1=a2*a1,a7=cbb0000006710(a1)):s_cbb<oo1966?(a1=cbb0000001152(a5),a2=cbb0000001153(a6),a1=a2*a1,a7=cbb0000001154(a1),j=cbb0000001155(a1),j2=cbb0000001156(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):(a1=cbb0000003096(a6),a2=cbb0000003097(a1),a1=a1>>>a2,a2=cbb0000003098(a1),a1=cbb0000003099(a5),a2=cbb0000003100(a6),a1=a2!==a1,a1=cbb0000003101(a1)):s_cbb<oo1967?s_cbb<oo1968?(a1=cbb0000004426(a2),a2=cbb0000004427(a3),a3=cbb0000004428(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004429(a3),a1=cbb0000004430(a2),a2=cbb0000004431(a1),a1=a2===a1,a3=cbb0000004432(a1)):(a5=cbb0000003786(a3),a1=cbb0000003787(typeof a5),a1=cbb0000003788({})):(a1=cbb0000007507({}),a1=shuz[start++],a2=cbb0000007508(a3),a3=cbb0000007509(a1),a2[constantPool[a1]]=a3);}else if(s_cbb<oo1969){s_cbb<oo1970?s_cbb<oo1971?(a2=cbb0000007699(allthis),a1=cbb0000007700(a2),a2=cbb0000007701(a1),a1=a1+a2,a1=cbb0000007702(a1)):(a1=cbb0000007687(a2),a2=cbb0000007688(a1),a1=a1+a2,a1=cbb0000007689(a1),a1=cbb0000007690(),a2=cbb0000007691(),a1=a2!=a1,cbb0000007692(a1)):(a1=cbb0000004788(a2),a2=cbb0000004789(a1),a1=a2==a1,a4=cbb0000004790(a1),a1=cbb0000004791(a6),a2=cbb0000004792(a1),a1=a1>>>a2,a2=cbb0000004793(a1));}else 
if(s_cbb<oo1972){s_cbb<oo1973?(a1=cbb0000004889(a2),a2=cbb0000004890(a1),a1=a2==a1,a4=cbb0000004891(a1),a1=cbb0000004892(a2),a2=cbb0000004893(a1),a1=a1/a2,a3=cbb0000004894(a1)):(a1=cbb0000004713(a3),a2=cbb0000004714(a1),a2.push(a1),a1=cbb0000004715(a2),a1=cbb0000004716(a5),a2=cbb0000004717(a6),a1=a2*a1,a7=cbb0000004718(a1));}else{a5=cbb0000003408(a1);a2=cbb0000003409(void a5);a1=cbb0000003410(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003411(a1)):a9=oo4829;;}}else if(s_cbb<oo1975){if(s_cbb<oo1976){if(s_cbb<oo1977){if(s_cbb<oo1978){if(s_cbb<oo1979){if(s_cbb<oo1980){if(s_cbb<oo1981){a1=cbb000000555(a5);a2=cbb000000556(a3);a3=cbb000000557(a1);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a1=cbb000000558(a3);a2=cbb000000559(a3);a1=a1>a2;a2=cbb000000560(a1);}else{a1=cbb000000774(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000775(a1)):a9=oo4829;;a1=cbb000000776();a2=cbb000000777();a1=a2!=a1;cbb000000778(a1);}}else s_cbb<oo1983?(a1=cbb0000005980(a6),a2=cbb0000005981(a7),a1=a1|a2,a8=cbb0000005982(a1),a1=cbb0000005983(a3),a2=cbb0000005984(a3),a1=a1>a2,a2=cbb0000005985(a1)):(a1=cbb0000006897(a5),a2=cbb0000006898(a6),a1=a2*a1,a7=cbb0000006899(a1),a5=cbb0000006900(a3),a4=cbb0000006901(!a5));}else s_cbb<oo1984?s_cbb<oo1985?(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008298(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008299(a5),a2=cbb0000008300(a6),a1=a2*a1,a7=cbb0000008301(a1)):(a1=cbb0000006662(a3),a2=cbb0000006663(a3),a1=a1>a2,a2=cbb0000006664(a1),a1=cbb0000006665(a3),a2=cbb0000006666(a4),a1=a1%a2,a5=cbb0000006667(a1)):(a1=cbb0000001316(a5),a2=cbb0000001317(a6),a1=a2!==a1,a1=cbb0000001318(a1),function(){debugger;}());}else 
if(s_cbb<oo1987){if(s_cbb<oo1988){s_cbb<oo1989?(a1=shuz[start++],a2=cbb0000007980(a1),a1=cbb0000007981(a6),a2=cbb0000007982(a7),a1=a1|a2,a8=cbb0000007983(a1)):(a1=cbb0000001809(a3),a2=cbb0000001810(a4),a1=a1^a2,a5=cbb0000001811(a1),function(){a1=shuz[start++];a3=cbb0000001812(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001813(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001814(a4);}());}else if(s_cbb<oo1993){a1=cbb0000005936(a3);a2=cbb0000005937(a4);a1=a1%a2;a5=cbb0000005938(a1);a1=cbb0000005939(a6);a2=cbb0000005940(a7);a1=a1|a2;a8=cbb0000005941(a1);}else{a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008407(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008408(a2);a2=cbb0000008409(a1);a1=a2==a1;a4=cbb0000008410(a1);}}else 
s_cbb<oo2005?s_cbb<oo2006?(a1=cbb000000886(a5),a2=cbb000000887(a9),a1=a2<a1,a6=cbb000000888(a1),a1=cbb000000889(a5),a2=cbb000000890(a6),a1=a2!==a1,a1=cbb000000891(a1)):(a1=shuz[start++],a2=cbb0000007485(a3),a3=cbb0000007486(a1),a2[constantPool[a1]]=a3,a1=cbb0000007487(a3),a2=cbb0000007488(a4),a1=a1>>a2,a5=cbb0000007489(a1)):(a1=cbb0000003564(),a2=cbb0000003565(),a1=a2!=a1,cbb0000003566(a1),a5=cbb0000003567(a2),a2=cbb0000003568(-a5));}else if(s_cbb<oo2007){if(s_cbb<oo2008){if(s_cbb<oo2009){if(s_cbb<oo2010){a5=cbb0000001245(a1);a2=cbb0000001246(void a5);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}else{a1=cbb0000006265({});a1=cbb0000006266(a3);a2=cbb0000006267(a2);a1=a1<<a2;a1=cbb0000006268(a1);}}else if(s_cbb<oo2017){a1=cbb0000007022(a9);a3=shuz[start++];a1[constantPool[a3]]+=oo4828;a1=cbb0000007023(a3);a2=cbb0000007024(a1);a1=a1<a2;a1=cbb0000007025(a1);}else{a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return 
a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}a1=cbb0000001273(a6);a2=cbb0000001274(a1);a1=a1>>>a2;a2=cbb0000001275(a1);}}else s_cbb<oo2025?s_cbb<oo2026?(a1=cbb000000189(),a2=cbb000000190(),a1=a2!=a1,cbb000000191(a1),a1=cbb000000192(a2),a2=cbb000000193(a3),a1=a1&a2,a3=cbb000000194(a1)):(a2=cbb0000004042(allthis),a5=cbb0000004043(a3),a4=cbb0000004044(!a5)):(a1=cbb0000002625(a2),a2=cbb0000002626(a1),a1=a2==a1,a4=cbb0000002627(a1),function(){i=[];a2=cbb0000002628(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}());}else if(s_cbb<oo2027){if(s_cbb<oo2028){s_cbb<oo2029?(a5=cbb000000210(a1),a2=cbb000000211(void a5),a1=cbb000000212(),a2=cbb000000213(),a1=a2!=a1,cbb000000214(a1)):(a1=cbb0000006394(a4),a2=cbb0000006395(a5),a1=a1-a2,a6=cbb0000006396(a1),a1=cbb0000006397(a2),a2=cbb0000006398(a1),a1=a1/a2,a3=cbb0000006399(a1));}else if(s_cbb<oo2030){a2=cbb0000005836([]);a1=cbb0000005837(a3);a2=cbb0000005838(a4);a1=a1>>a2;a5=cbb0000005839(a1);}else{a1=cbb0000003430(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003431(a1)):a9=oo4829;;a1=cbb0000003432(a3);a2=cbb0000003433(a4);a1=a1^a2;a5=cbb0000003434(a1);}}else s_cbb<oo2032?s_cbb<oo2033?(a5=cbb0000002685(a1),a2=cbb0000002686(void a5),a1=cbb0000002687(a5),a2=cbb0000002688(a6),a1=a2*a1,a7=cbb0000002689(a1)):(a5=cbb0000002705(a1),a2=cbb0000002706(void a5),a1=cbb0000002707(a3),a2=cbb0000002708(a4),a1=a1^a2,a5=cbb0000002709(a1)):(a1=shuz[start++],a2=shuz[start++],a1=new 
RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008211(a1),a5=cbb0000008212(a3),a4=cbb0000008213(!a5));}else if(s_cbb<oo2034){if(s_cbb<oo2035){if(s_cbb<oo2036){if(s_cbb<oo2037){if(s_cbb<oo2038){a1=cbb0000008373(a3);a2=cbb0000008374(a2);a1=a1<<a2;a1=cbb0000008375(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008376(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}else{a1=cbb0000007053(a5);a2=cbb0000007054(a9);a1=a2<a1;a6=cbb0000007055(a1);a1=cbb0000007056(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}}else s_cbb<oo2051?(a5=cbb0000002356(a3),a4=cbb0000002357(!a5),a1=cbb0000002358(a2),a2=cbb0000002359(a1),a1=a2===a1,a3=cbb0000002360(a1)):(function(){a1=shuz[start++];a3=cbb0000001752(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001753(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001754(a4);}(),a1=cbb0000001755(a5),a2=cbb0000001756(a6),a1=a2*a1,a7=cbb0000001757(a1));}else 
s_cbb<oo2055?s_cbb<oo2056?(a1=shuz[start++],start+=a1,a1=cbb0000001525(a5),a2=cbb0000001526(a9),a1=a2<a1,a6=cbb0000001527(a1)):(function(){i=[];a2=cbb0000002579(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002580(a2),a2=cbb0000002581(a1),a1=a2-a1,a1=cbb0000002582(a1)):(function(){a1=shuz[start++];a3=cbb0000001879(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001880(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001881(a4);}(),a1=cbb0000001882(a3),a2=cbb0000001883(a4),a1=a1>>a2,a5=cbb0000001884(a1));}else if(s_cbb<oo2060){s_cbb<oo2061?s_cbb<oo2062?(a1=cbb0000005414(cbbb),a2=cbb0000005415(a1),a1=a1<=a2,a5=cbb0000005416(a1),a1=cbb0000005417(a4),a2=cbb0000005418(a5),a1=a1-a2,a6=cbb0000005419(a1)):(function(){a1=cbb0000002451(a2);throw a1;}(),a1=cbb0000002452(a3),a2=cbb0000002453(a4),a1=a1%a2,a5=cbb0000002454(a1)):s_cbb<oo2063?(a1=cbb0000005818(a3),a2=cbb0000005819(a4),a1=a1>>a2,a5=cbb0000005820(a1),a1=cbb0000005821(a5),a2=cbb0000005822(a6),a1=a2!==a1,a1=cbb0000005823(a1)):(a1=cbb0000001403(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a1=cbb0000001404(a2),a2=cbb0000001405(a1),a1=a2-a1,a1=cbb0000001406(a1));}else 
if(s_cbb<oo2065){if(s_cbb<oo2066){a1=cbb0000001740(a3);a2=cbb0000001741(a4);a1=a1%a2;a5=cbb0000001742(a1);(function(){a1=shuz[start++];a3=cbb0000001743(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001744(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001745(a4);})();}else{a1=cbb0000003417();a2=cbb0000003418();a1=a2!=a1;cbb0000003419(a1);a1=cbb0000003420(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003421(a1)):a9=oo4829;;}}else{a1=cbb0000003248(a6);a2=cbb0000003249(a7);a1=a1|a2;a8=cbb0000003250(a1);a1=cbb0000003251(a2);a2=cbb0000003252(a1);a1=a2===a1;a3=cbb0000003253(a1);}}else s_cbb<oo2071?s_cbb<oo2072?s_cbb<oo2073?s_cbb<oo2074?(a1=cbb0000004974(a3),a2=cbb0000004975(a3),a1=a1>a2,a2=cbb0000004976(a1),a1=cbb0000004977(a6),a2=cbb0000004978(a7),a1=a1|a2,a8=cbb0000004979(a1)):(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008282(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008283(a2),a2=cbb0000008284(a3),a1=a1&a2,a3=cbb0000008285(a1)):s_cbb<oo2076?(function(){i=[];a2=cbb0000002537(a2);for(a1 in 
a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002538(a3),a2=cbb0000002539(a2),a1=a1<<a2,a1=cbb0000002540(a1)):(a1=cbb000000254(),a2=cbb000000255(),a1=a2!=a1,cbb000000256(a1),a1=cbb000000257(a2),a2=cbb000000258(a1),a1=a1/a2,a3=cbb000000259(a1)):s_cbb<oo2077?s_cbb<oo2078?(a1=cbb0000006788(a2),a2=cbb0000006789(a1),a1=a1+a2,a1=cbb0000006790(a1),a1=cbb0000006791(a5),a2=cbb0000006792(a6),a1=a2*a1,a7=cbb0000006793(a1)):(function(){debugger;}(),a1=cbb0000005614(a6),a2=cbb0000005615(a1),a1=a1>>>a2,a2=cbb0000005616(a1)):(a1=cbb0000006003(a6),a2=cbb0000006004(a7),a1=a1|a2,a8=cbb0000006005(a1),a1=cbb0000006006(a2),a2=cbb0000006007(a1),a1=a1/a2,a3=cbb0000006008(a1)):s_cbb<oo2079?s_cbb<oo2080?s_cbb<oo2081?(a1=cbb0000004998(),a2=cbb0000004999(),a1=a2!=a1,cbb0000005000(a1),a1=cbb0000005001(a3),a2=cbb0000005002(a3),a1=a1>a2,a2=cbb0000005003(a1)):(a1=cbb0000007574(a2),a2=cbb0000007575(a1),a1=a2-a1,a1=cbb0000007576(a1),a1=cbb0000007577(a2),a2=cbb0000007578(a1),a1=a1+a2,a1=cbb0000007579(a1)):s_cbb<oo2082?(a1=cbb0000005310(a3),a2=cbb0000005311(a1),a1=a1<a2,a1=cbb0000005312(a1),a1=cbb0000005313(cbbb),a2=cbb0000005314(a1),a1=a1<=a2,a5=cbb0000005315(a1)):(a1=cbb000000892(a5),a2=cbb000000893(a9),a1=a2<a1,a6=cbb000000894(a1),a1=cbb000000895(a2),a2=cbb000000896(a1),a1=a1+a2,a1=cbb000000897(a1)):s_cbb<oo2083?s_cbb<oo2084?(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008221(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008222(a5),a2=cbb0000008223(a6),a1=a2*a1,a7=cbb0000008224(a1)):(a1=cbb0000007228(a3),a2=cbb0000007229(a2),a1=a1<<a2,a1=cbb0000007230(a1),a1=cbb0000007231(a3),a2=cbb0000007232(a1),a1=a1<a2,a1=cbb0000007233(a1)):(function(){a1=shuz[start++];a3=cbb0000001792(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001793(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.
alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001794(a4);}(),a1=cbb0000001795(a4),a2=cbb0000001796(a1),a1=a1>=a2,a2=cbb0000001797(a1));}else if(s_cbb<oo2089){if(s_cbb<oo2090){if(s_cbb<oo2091){s_cbb<oo2092?s_cbb<oo2093?s_cbb<oo2094?(a5=cbb0000002700(a1),a2=cbb0000002701(void a5),a1=cbb0000002702(a3),a2=cbb0000002703(a4),a1=a1>>a2,a5=cbb0000002704(a1)):(a2=cbb0000004026(allthis),a1=cbb0000004027(a5),a2=cbb0000004028(a9),a1=a2<a1,a6=cbb0000004029(a1)):s_cbb<oo2095?(a1=cbb0000005686(a6),a2=cbb0000005687(a1),a1=a1>>>a2,a2=cbb0000005688(a1),a1=cbb0000005689(a3),a2=cbb0000005690(a1),a1=a1<a2,a1=cbb0000005691(a1)):(a1=cbb0000004765(a2),a2=cbb0000004766(a1),a1=a2==a1,a4=cbb0000004767(a1),function(){debugger;}()):s_cbb<oo2096?s_cbb<oo2097?(a1=cbb0000006155(a3),a2=cbb0000006156(a2),a1=a1<<a2,a1=cbb0000006157(a1),a1=cbb0000006158(a2),a2=cbb0000006159(a1),a1=a1+a2,a1=cbb0000006160(a1)):(a1=cbb0000002146(a3),a2=cbb0000002147(a4),a3=cbb0000002148(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002149(a3),a2=cbb0000002150(a4),a1=a1>>a2,a5=cbb0000002151(a1)):(a1=cbb0000007784(),a2=cbb0000007785(),a1=a2!=a1,cbb0000007786(a1),a1=cbb0000007787(a4),a2=cbb0000007788(a5),a1=a1-a2,a6=cbb0000007789(a1));}else if(s_cbb<oo2098){s_cbb<oo2099?s_cbb<oo2100?(a1=cbb000000152(a3),a2=cbb000000153(a1),a1=a1 in a2,a1=cbb000000154(a1),a1=cbb000000155(a3),a2=cbb000000156(a4),a1=a1%a2,a5=cbb000000157(a1)):(a5=cbb0000003789(a3),a1=cbb0000003790(typeof a5),a1=cbb0000003791(a3),a2=cbb0000003792(a4),a1=a1^a2,a5=cbb0000003793(a1)):s_cbb<oo2101?(a1=cbb0000006331(a5),a2=cbb0000006332(a6),a1=a2*a1,a7=cbb0000006333(a1),a1=cbb0000006334(a2),a2=cbb0000006335(a1),a1=a1/a2,a3=cbb0000006336(a1)):(a5=cbb0000003675(a1),a2=cbb0000003676(void a5),a5=cbb0000003677(a3),a1=cbb0000003678(typeof a5));}else 
if(s_cbb<oo2102){if(s_cbb<oo2103){a1=cbb0000008310(a6);a2=cbb0000008311(a1);a1=a1>>>a2;a2=cbb0000008312(a1);(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008313(a2);cbbb[a9]=argsList[a2];}})();}else{a1=cbb000000784(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000785(a1)):a9=oo4829;;a1=cbb000000786(a3);a2=cbb000000787(a4);a1=a1>>a2;a5=cbb000000788(a1);}}else{a1=shuz[start++];start+=a1;a1=cbb0000001538({});}}else if(s_cbb<oo2106){if(s_cbb<oo2107){s_cbb<oo2108?s_cbb<oo2109?(a1=cbb0000006045(a6),a2=cbb0000006046(a7),a1=a1|a2,a8=cbb0000006047(a1),a1=cbb0000006048(),a2=cbb0000006049(),a1=a2!=a1,cbb0000006050(a1)):(a1=cbb0000002483(cbbb),a2=cbb0000002484(a1),a1=a1<=a2,a5=cbb0000002485(a1),function(){a1=cbb0000002486(a2);throw a1;}()):s_cbb<oo2110?(a1=cbb0000006495(a6),a2=cbb0000006496(a7),a1=a1|a2,a8=cbb0000006497(a1),a1=cbb0000006498(a3),a2=cbb0000006499(a4),a1=a1^a2,a5=cbb0000006500(a1)):(a1=cbb0000002062({}),a1=cbb0000002063(a2),a2=cbb0000002064(a3),a1=a1&a2,a3=cbb0000002065(a1));}else if(s_cbb<oo2111){if(s_cbb<oo2112){a2=cbb0000003396([]);a1=cbb0000003397(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003398(a1)):a9=oo4829;;}else{a1=cbb0000003599(a3);a2=cbb0000003600(a3);a1=a1>a2;a2=cbb0000003601(a1);a5=cbb0000003602(a2);a2=cbb0000003603(-a5);}}else{a1=cbb0000001381(a3);a2=cbb0000001382(a1);a1=a1<a2;a1=cbb0000001383(a1);a1=cbb0000001384(a2);a3=shuz[start++];a1?start+=a3:a9=oo4820;}}else if(s_cbb<oo2115){if(s_cbb<oo2116){if(s_cbb<oo2117){a1=cbb0000001003(a3);a2=cbb0000001004(a1);a1=a1<a2;a1=cbb0000001005(a1);return;}else{a1=cbb000000395(a5);a2=cbb000000396(a9);a1=a2<a1;a6=cbb000000397(a1);a1=cbb000000398(a2);a2=cbb000000399(a1);a1=a2-a1;a1=cbb000000400(a1);}}else s_cbb<oo2118?(all=cbbb,a1=cbb0000007388(cbbb),a1=cbb0000007389(a3),a2=cbb0000007390(a2),a1=a1<<a2,a1=cbb0000007391(a1)):(a1=cbb0000006828(a5),a2=cbb0000006829(a6),a1=a2*a1,a7=cbb0000006830(a1),a1=cbb0000006831(a5),a2=cbb0000006832(a9),a1=a2<a1,a6=cbb0000006833(a1));}else 
s_cbb<oo2119?s_cbb<oo2120?(a1=cbb0000001461(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a1=cbb0000001462(a5),a2=cbb0000001463(a6),a1=a2!==a1,a1=cbb0000001464(a1)):(a1=cbb000000439(a2),a2=cbb000000440(a1),a1=a2-a1,a1=cbb000000441(a1),a1=cbb000000442(a3),a2=cbb000000443(a1),a1=a1<a2,a1=cbb000000444(a1)):(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004173(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004174(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004175(a4);}(),a1=cbb0000004176(a3),a2=cbb0000004177(a1),a1=a1<a2,a1=cbb0000004178(a1));}else if(s_cbb<oo2129){if(s_cbb<oo2130){s_cbb<oo2131?s_cbb<oo2132?s_cbb<oo2133?(a1=cbb0000005030({}),a1=cbb0000005031(a3),a2=cbb0000005032(a3),a1=a1>a2,a2=cbb0000005033(a1)):(a1=cbb0000002336(a3),a2=cbb0000002337(a4),a1=a1^a2,a5=cbb0000002338(a1),a5=cbb0000002339(a3),a4=cbb0000002340(!a5)):s_cbb<oo2134?(a1=cbb000000910(a3),a2=cbb000000911(a2),a1=a1<<a2,a1=cbb000000912(a1),a1=cbb000000913(a5),a2=cbb000000914(a9),a1=a2<a1,a6=cbb000000915(a1)):(a1=cbb0000007437(a2),a2=cbb0000007438(a1),a1=a1/a2,a3=cbb0000007439(a1),a1=shuz[start++],a2=cbb0000007440(a3),a3=cbb0000007441(a1),a2[constantPool[a1]]=a3):s_cbb<oo2135?s_cbb<oo2136?(a1=cbb0000006253(a3),a2=cbb0000006254(a2),a1=a1<<a2,a1=cbb0000006255(a1),a1=cbb0000006256(a3),a2=cbb0000006257(a4),a1=a1>>a2,a5=cbb0000006258(a1)):(a1=cbb0000002447(a2),a2=cbb0000002448(a1),a1=a2==a1,a4=cbb0000002449(a1),function(){a1=cbb0000002450(a2);throw a1;}()):(a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008175(a1),a5=cbb0000008176(a3),a4=cbb0000008177(!a5));}else 
if(s_cbb<oo2137){if(s_cbb<oo2138){s_cbb<oo2139?(a1=cbb0000005783(a3),a2=cbb0000005784(a4),a1=a1>>a2,a5=cbb0000005785(a1),a1=cbb0000005786(a2),a2=cbb0000005787(a1),a1=a1+a2,a1=cbb0000005788(a1)):(a1=cbb0000001999({}),a1=cbb0000002000(a2),a2=cbb0000002001(a1),a1=a1+a2,a1=cbb0000002002(a1));}else if(s_cbb<oo2140){a1=cbb000000696(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000697(a1)):a9=oo4829;;a1=cbb000000698();a2=cbb000000699();a1=a2!=a1;cbb000000700(a1);}else{a1=cbb0000002670(cbbb);a2=cbb0000002671(a1);a1=a1<=a2;a5=cbb0000002672(a1);a5=cbb0000002673(a1);a2=cbb0000002674(void a5);}}else s_cbb<oo2142?s_cbb<oo2143?(a1=cbb0000005533(a2),a2=cbb0000005534(a3),a1=a1&a2,a3=cbb0000005535(a1),a2=cbb0000005536([])):(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004248(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004249(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004250(a4);}(),a1=cbb0000004251(a5),a2=cbb0000004252(a6),a1=a2*a1,a7=cbb0000004253(a1)):(a1=cbb0000006249(a3),a2=cbb0000006250(a2),a1=a1<<a2,a1=cbb0000006251(a1),a2=cbb0000006252([]));}else if(s_cbb<oo2151){if(s_cbb<oo2152){s_cbb<oo2153?s_cbb<oo2154?(a1=shuz[start++],start+=a1,a1=cbb0000001473(a2),a2=cbb0000001474(a1),a1=a1+a2,a1=cbb0000001475(a1)):(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008241(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008242(cbbb),a2=cbb0000008243(a1),a1=a1<=a2,a5=cbb0000008244(a1)):s_cbb<oo2156?(a1=cbb0000007030(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a1=cbb0000007031(a4),a2=cbb0000007032(a5),a1=a1-a2,a6=cbb0000007033(a1)):(a1=cbb000000813(a5),a2=cbb000000814(a9),a1=a2<a1,a6=cbb000000815(a1),a1=cbb000000816(a3),a2=cbb000000817(a4),a1=a1%a2,a5=cbb000000818(a1));}else 
if(s_cbb<oo2158){if(s_cbb<oo2159){a1=cbb0000001449(a3);a2=cbb0000001450(a4);a1=a1>>a2;a5=cbb0000001451(a1);a1=cbb0000001452(a2);a3=shuz[start++];a1?start+=a3:a9=oo4820;}else{a1=cbb0000007149(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a2=cbb0000007150([]);}}else{a1=cbb0000008439(a5);a2=cbb0000008440(a9);a1=a2<a1;a6=cbb0000008441(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008442(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else if(s_cbb<oo2173){if(s_cbb<oo2174){s_cbb<oo2175?(a1=cbb0000007675(a2),a2=cbb0000007676(a1),a1=a1+a2,a1=cbb0000007677(a1),a1=cbb0000007678(a5),a2=cbb0000007679(a9),a1=a2<a1,a6=cbb0000007680(a1)):(a1=shuz[start++],start+=a1,function(){debugger;}());}else{a1=cbb000000734(a3);a2=cbb000000735(a4);a1=a1^a2;a5=cbb000000736(a1);a1=cbb000000737(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000738(a1)):a9=oo4829;;}}else 
if(s_cbb<oo2177){s_cbb<oo2178?(a1=cbb0000002024({}),a1=cbb0000002025(a5),a2=cbb0000002026(a9),a1=a2<a1,a6=cbb0000002027(a1)):(a1=cbb0000002998(a6),a2=cbb0000002999(a1),a1=a1>>>a2,a2=cbb0000003000(a1),a1=cbb0000003001(a5),a2=cbb0000003002(a6),a1=a2!==a1,a1=cbb0000003003(a1));}else{a1=cbb0000007121(a2);a2=cbb0000007122(a1);a1=a2==a1;a4=cbb0000007123(a1);a1=cbb0000007124(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}}else if(s_cbb<oo2180){if(s_cbb<oo2181){if(s_cbb<oo2182){if(s_cbb<oo2183){if(s_cbb<oo2184){if(s_cbb<oo2185){if(s_cbb<oo2186){if(s_cbb<oo2187){a1=cbb0000005040(a3);a2=cbb0000005041(a3);a1=a1>a2;a2=cbb0000005042(a1);a1=cbb0000005043(a3);a2=cbb0000005044(a3);a1=a1>a2;a2=cbb0000005045(a1);}else{a1=cbb0000001206(a3);a2=cbb0000001207(a1);a1=a1<a2;a1=cbb0000001208(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else s_cbb<oo2194?(a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008215(a1),a1=cbb0000008216(),a2=cbb0000008217(),a1=a2!=a1,cbb0000008218(a1)):(a1=cbb000000451(a3),a2=cbb000000452(a3),a1=a1>a2,a2=cbb000000453(a1),a1=cbb000000454(a2),a2=cbb000000455(a1),a1=a2-a1,a1=cbb000000456(a1));}else if(s_cbb<oo2195){if(s_cbb<oo2196){(function(){i=[];a2=cbb0000002575(a2);for(a1 in 
a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;})();a1=cbb0000002576(a5);a2=cbb0000002577(a6);a1=a2!==a1;a1=cbb0000002578(a1);}else{a1=cbb0000001024(a4);a2=cbb0000001025(a1);a1=a1>=a2;a2=cbb0000001026(a1);return;}}else{a1=shuz[start++];a2=cbb0000007933(a1);a5=cbb0000007934(a3);a4=cbb0000007935(!a5);}}else if(s_cbb<oo2197){if(s_cbb<oo2198){s_cbb<oo2199?(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003943()):a4.splice(oo4806,oo4806,cbb0000003944());}a1=cbb0000003945(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a1=cbb0000003946(a3),a2=cbb0000003947(a4),a1=a1%a2,a5=cbb0000003948(a1)):(a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008207(a1),a1=cbb0000008208(a5),a2=cbb0000008209(a6),a1=a2!==a1,a1=cbb0000008210(a1));}else if(s_cbb<oo2209){a1=cbb0000001050(a5);a2=cbb0000001051(a6);a1=a2*a1;a7=cbb0000001052(a1);return;}else{a1=cbb000000525(a2);a2=cbb000000526(a1);a1=a2-a1;a1=cbb000000527(a1);a1=cbb000000528(a5);a2=cbb000000529(a3);a3=cbb000000530(a1);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;}}else s_cbb<oo2210?s_cbb<oo2211?(a1=cbb0000004883(a2),a2=cbb0000004884(a3),a1=a1&a2,a3=cbb0000004885(a1),a1=cbb0000004886(a2),a2=cbb0000004887(a1),a1=a2==a1,a4=cbb0000004888(a1)):(a1=cbb0000003296(a2),a2=cbb0000003297(a1),a1=a2===a1,a3=cbb0000003298(a1),a1=cbb0000003299(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):(a1=cbb000000864(a3),a2=cbb000000865(a4),a1=a1%a2,a5=cbb000000866(a1),a1=cbb000000867(a5),a2=cbb000000868(a9),a1=a2<a1,a6=cbb000000869(a1));}else 
if(s_cbb<oo2213){if(s_cbb<oo2214){s_cbb<oo2215?s_cbb<oo2216?(a1=cbb0000005573(a2),a2=cbb0000005574(a1),a1=a2-a1,a1=cbb0000005575(a1),a1=cbb0000005576(a2),a2=cbb0000005577(a3),a1=a1&a2,a3=cbb0000005578(a1)):(a1=cbb0000003300(a3),a2=cbb0000003301(a1),a1=a1<a2,a1=cbb0000003302(a1),a1=cbb0000003303(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):s_cbb<oo2218?(a2=cbb0000005294(allthis),a1=cbb0000005295(cbbb),a2=cbb0000005296(a1),a1=a1<=a2,a5=cbb0000005297(a1)):(a1=cbb0000002381(a3),a2=cbb0000002382(a1),a1=a1<a2,a1=cbb0000002383(a1),a5=cbb0000002384(a3),a4=cbb0000002385(!a5));}else if(s_cbb<oo2219){if(s_cbb<oo2220){a1=cbb0000001063(a3);a2=cbb0000001064(a4);a1=a1%a2;a5=cbb0000001065(a1);j=cbb0000001066(a1);j2=cbb0000001067(a2);j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1;}else{a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}a1=cbb0000001267(a2);a2=cbb0000001268(a1);a1=a1/a2;a3=cbb0000001269(a1);}}else{a1=cbb000000578(a4);a2=cbb000000579(a5);a1=a1-a2;a6=cbb000000580(a1);a1=cbb000000581(a5);a2=cbb000000582(a3);a3=cbb000000583(a1);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;}}else 
if(s_cbb<oo2227){if(s_cbb<oo2228){s_cbb<oo2229?(a1=cbb0000005719(a6),a2=cbb0000005720(a1),a1=a1>>>a2,a2=cbb0000005721(a1),a1=cbb0000005722(a3),a2=cbb0000005723(a1),a1=a1<a2,a1=cbb0000005724(a1)):(a2=cbb0000001940([]),a1=cbb0000001941(a3),a2=cbb0000001942(a2),a1=a1<<a2,a1=cbb0000001943(a1));}else if(s_cbb<oo2230){a1=cbb0000001012(a2);a2=cbb0000001013(a1);a1=a1+a2;a1=cbb0000001014(a1);return;}else{a1=shuz[start++];a2=cbb0000007988(a1);a1=cbb0000007989(a3);a2=cbb0000007990(a4);a1=a1^a2;a5=cbb0000007991(a1);}}else s_cbb<oo2231?s_cbb<oo2232?(a1=cbb000000242(),a2=cbb000000243(),a1=a2!=a1,cbb000000244(a1),a1=cbb000000245(a2),a2=cbb000000246(a1),a1=a2===a1,a3=cbb000000247(a1)):(a1=cbb0000005585(a2),a2=cbb0000005586(a3),a1=a1&a2,a3=cbb0000005587(a1),a1=cbb0000005588(a6),a2=cbb0000005589(a1),a1=a1>>>a2,a2=cbb0000005590(a1)):(a1=cbb0000001899(a3),a2=cbb0000001900(a4),a1=a1>>a2,a5=cbb0000001901(a1),a2=cbb0000001902([]));}else if(s_cbb<oo2233){if(s_cbb<oo2234){s_cbb<oo2235?s_cbb<oo2236?s_cbb<oo2237?(a1=cbb0000004385(a2),a2=cbb0000004386(a3),a3=cbb0000004387(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004388(a3),a1=cbb0000004389(a3),a2=cbb0000004390(a4),a1=a1>>a2,a5=cbb0000004391(a1)):(a5=cbb0000008143(a1),a2=cbb0000008144(void a5),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008145(a1)):s_cbb<oo2238?(a2=cbb0000002510(allthis),function(){a1=cbb0000002511(a2);throw a1;}()):(a1=cbb0000002084(a4),a2=cbb0000002085(a5),a1=a1-a2,a6=cbb0000002086(a1),a1=cbb0000002087({})):s_cbb<oo2239?s_cbb<oo2240?(a1=cbb000000663(a5),a2=cbb000000664(a3),a3=cbb000000665(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb000000666(a2),a2=cbb000000667(a3),a1=a1&a2,a3=cbb000000668(a1)):(a1=cbb0000008195(a2),a2=cbb0000008196(a1),a1=a1+a2,a1=cbb0000008197(a1),a1=shuz[start++],a2=shuz[start++],a1=new 
RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008198(a1)):(a1=cbb0000003955(a5),a2=cbb0000003956(a6),a1=a2!==a1,a1=cbb0000003957(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003958()):a4.splice(oo4806,oo4806,cbb0000003959());}a1=cbb0000003960(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}());}else if(s_cbb<oo2250){if(s_cbb<oo2251){if(s_cbb<oo2252){a1=cbb0000003213(a3);a2=cbb0000003214(a4);a1=a1^a2;a5=cbb0000003215(a1);a1=cbb0000003216(a2);a2=cbb0000003217(a1);a1=a2===a1;a3=cbb0000003218(a1);}else{a1=cbb0000003459(a5);a2=cbb0000003460(a6);a1=a2!==a1;a1=cbb0000003461(a1);a1=cbb0000003462(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003463(a1)):a9=oo4829;;}}else s_cbb<oo2254?(a1=shuz[start++],start+=a1,a1=cbb0000001479(a3),a2=cbb0000001480(a4),a1=a1^a2,a5=cbb0000001481(a1)):(a1=cbb0000006424(a2),a2=cbb0000006425(a1),a1=a1/a2,a3=cbb0000006426(a1),a1=cbb0000006427(a3),a2=cbb0000006428(a4),a1=a1%a2,a5=cbb0000006429(a1));}else s_cbb<oo2255?s_cbb<oo2256?(a5=cbb0000004505(a3),a1=cbb0000004506(~a5),a5=cbb0000004507(a3),a4=cbb0000004508(!a5)):(a1=shuz[start++],a2=cbb0000007957(a1),a1=cbb0000007958(a2),a2=cbb0000007959(a1),a1=a1/a2,a3=cbb0000007960(a1)):(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008266(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008267(a4),a2=cbb0000008268(a1),a1=a1>=a2,a2=cbb0000008269(a1));}else 
if(s_cbb<oo2258){if(s_cbb<oo2259){if(s_cbb<oo2260){if(s_cbb<oo2261){a1=cbb0000005846(a3);a2=cbb0000005847(a4);a1=a1>>a2;a5=cbb0000005848(a1);a1=cbb0000005849(a2);a2=cbb0000005850(a1);a1=a2==a1;a4=cbb0000005851(a1);}else{a1=cbb0000001218(a6);a2=cbb0000001219(a7);a1=a1|a2;a8=cbb0000001220(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else if(s_cbb<oo2268){a1=cbb0000003497(a2);a2=cbb0000003498(a1);a1=a2===a1;a3=cbb0000003499(a1);a1=cbb0000003500(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003501(a1)):a9=oo4829;;}else{a2=cbb0000002583([]);(function(){i=[];a2=cbb0000002584(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;})();}}else s_cbb<oo2270?s_cbb<oo2271?(a1=shuz[start++],start+=a1,a1=cbb0000001514({})):(a1=cbb0000007709(a2),a2=cbb0000007710(a1),a1=a1+a2,a1=cbb0000007711(a1),a2=cbb0000007712(allthis)):(function(){debugger;}(),a5=cbb0000001311(a3),a4=cbb0000001312(!a5));}else 
s_cbb<oo2272?s_cbb<oo2273?s_cbb<oo2274?(function(){a1=cbb0000001575(a2);a2=cbb0000001576(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001577(a1);}(),a1=cbb0000001578(a3),a2=cbb0000001579(a4),a1=a1%a2,a5=cbb0000001580(a1)):(a1=cbb0000004119(a2),a2=cbb0000004120(a1),a1=a2-a1,a1=cbb0000004121(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004122(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004123(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004124(a4);}()):s_cbb<oo2282?(a5=cbb0000005504(a1),a2=cbb0000005505(void a5),a1=cbb0000005506(a2),a2=cbb0000005507(a3),a1=a1&a2,a3=cbb0000005508(a1)):(a1=cbb0000002306(a3),a2=cbb0000002307(a4),a1=a1>>a2,a5=cbb0000002308(a1),a5=cbb0000002309(a3),a4=cbb0000002310(!a5)):s_cbb<oo2283?s_cbb<oo2284?(a1=cbb0000001357(a3),a2=cbb0000001358(a2),a1=a1<<a2,a1=cbb0000001359(a1),function(){debugger;}()):(all=cbbb,a1=cbb0000007404(cbbb),a2=cbb0000007405(allthis)):(a1=shuz[start++],start+=a1,a1=cbb0000001511(a3),a2=cbb0000001512(a3),a1=a1>a2,a2=cbb0000001513(a1));}else if(s_cbb<oo2285){if(s_cbb<oo2286){if(s_cbb<oo2287){if(s_cbb<oo2288){if(s_cbb<oo2289){if(s_cbb<oo2290){a1=cbb0000003320(a3);a3=shuz[start++];a1[constantPool[a3]]-=oo4828;a1=cbb0000003321(a2);a2=cbb0000003322(a1);a1=a1+a2;a1=cbb0000003323(a1);}else{a1=cbb0000001230(a2);a2=cbb0000001231(a1);a1=a1+a2;a1=cbb0000001232(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return 
a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else s_cbb<oo2298?(a1=cbb0000005649(a3),a2=cbb0000005650(a4),a1=a1>>a2,a5=cbb0000005651(a1),a1=cbb0000005652(a6),a2=cbb0000005653(a1),a1=a1>>>a2,a2=cbb0000005654(a1)):(a1=cbb0000004689(a6),a2=cbb0000004690(a1),a1=a1>>>a2,a2=cbb0000004691(a1),a1=cbb0000004692(a3),a2=cbb0000004693(a1),a2.push(a1),a1=cbb0000004694(a2));}else s_cbb<oo2299?s_cbb<oo2300?(a1=cbb0000006507(a5),a2=cbb0000006508(a9),a1=a2<a1,a6=cbb0000006509(a1),a1=cbb0000006510(a3),a2=cbb0000006511(a4),a1=a1^a2,a5=cbb0000006512(a1)):(a1=shuz[start++],a2=cbb0000008026(constantPool[a1]),a1=cbb0000008027(a3),a2=cbb0000008028(a4),a1=a1^a2,a5=cbb0000008029(a1)):(a1=cbb0000006435(a5),a2=cbb0000006436(a9),a1=a2<a1,a6=cbb0000006437(a1),a1=cbb0000006438(a2),a2=cbb0000006439(a1),a1=a1/a2,a3=cbb0000006440(a1));}else s_cbb<oo2301?s_cbb<oo2302?s_cbb<oo2303?(function(){debugger;}(),a1=cbb0000003051(a5),a2=cbb0000003052(a6),a1=a2!==a1,a1=cbb0000003053(a1)):(a1=cbb00000028(a3),a2=cbb00000029(a4),a1=a1%a2,a5=cbb00000030(a1),a1=cbb00000031(a3),a2=cbb00000032(a1),a1=a1 in a2,a1=cbb00000033(a1)):s_cbb<oo2304?(a1=shuz[start++],a2=cbb0000007442(a3),a3=cbb0000007443(a1),a2[constantPool[a1]]=a3,a2=cbb0000007444([])):(a2=cbb0000001303([]),function(){debugger;}()):s_cbb<oo2305?s_cbb<oo2306?(a1=cbb0000006812(a5),a2=cbb0000006813(a6),a1=a2*a1,a7=cbb0000006814(a1),a2=cbb0000006815(allthis)):(a5=cbb0000003371(a3),a4=cbb0000003372(!a5),a1=cbb0000003373(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):(a2=cbb0000001970([]),a2=cbb0000001971([]));}else 
s_cbb<oo2308?s_cbb<oo2309?s_cbb<oo2310?s_cbb<oo2311?(a1=cbb0000004266(a4),a2=cbb0000004267(a1),a1=a1>=a2,a2=cbb0000004268(a1),a1=cbb0000004269(a2),a2=cbb0000004270(a3),a3=cbb0000004271(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004272(a3)):(a1=cbb000000593(a3),a2=cbb000000594(a4),a1=a1%a2,a5=cbb000000595(a1),a1=cbb000000596(a5),a2=cbb000000597(a3),a3=cbb000000598(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):s_cbb<oo2312?(a1=cbb0000002967(a3),a2=cbb0000002968(a3),a1=a1>a2,a2=cbb0000002969(a1),a1=cbb0000002970(a5),a2=cbb0000002971(a6),a1=a2!==a1,a1=cbb0000002972(a1)):(a5=cbb0000002662(a1),a2=cbb0000002663(void a5),a1=cbb0000002664({})):s_cbb<oo2313?s_cbb<oo2314?(a1=cbb0000007945(a3),a2=cbb0000007946(a4),a1=a1^a2,a5=cbb0000007947(a1),a1=shuz[start++],a2=cbb0000007948(a1)):(a1=cbb000000876(a6),a2=cbb000000877(a1),a1=a1>>>a2,a2=cbb000000878(a1),a1=cbb000000879(a5),a2=cbb000000880(a9),a1=a2<a1,a6=cbb000000881(a1)):(function(){a1=shuz[start++];a3=cbb0000001857(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001858(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001859(a4);}(),a1=cbb0000001860(a3),a2=cbb0000001861(a4),a1=a1^a2,a5=cbb0000001862(a1)):s_cbb<oo2318?s_cbb<oo2319?s_cbb<oo2320?(a1=cbb0000008189(a2),a2=cbb0000008190(a1),a1=a1/a2,a3=cbb0000008191(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008192(a1)):(j=cbb0000001147(a1),j2=cbb0000001148(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a1=cbb0000001149(a4),a2=cbb0000001150(a1),a1=a1>=a2,a2=cbb0000001151(a1)):s_cbb<oo2321?(a5=cbb0000006518(a1),a2=cbb0000006519(void 
a5),a1=cbb0000006520(a3),a2=cbb0000006521(a4),a1=a1^a2,a5=cbb0000006522(a1)):(a5=cbb0000004549(a3),a1=cbb0000004550(~a5),a2=cbb0000004551([])):s_cbb<oo2322?s_cbb<oo2323?(a5=cbb0000004519(a3),a1=cbb0000004520(~a5),a1=cbb0000004521(),a2=cbb0000004522(),a1=a2!=a1,cbb0000004523(a1)):(a1=cbb0000003739(a2),a2=cbb0000003740(a1),a1=a1/a2,a3=cbb0000003741(a1),a5=cbb0000003742(a3),a1=cbb0000003743(typeof a5)):(a1=cbb0000006051(a6),a2=cbb0000006052(a7),a1=a1|a2,a8=cbb0000006053(a1),a1=cbb0000006054({}));}else if(s_cbb<oo2324){if(s_cbb<oo2325){if(s_cbb<oo2326){if(s_cbb<oo2327){if(s_cbb<oo2328){a1=cbb0000007392(a4);a2=cbb0000007393(a5);a1=a1-a2;a6=cbb0000007394(a1);all=cbbb;a1=cbb0000007395(cbbb);}else{a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008377(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008378(a4);a2=cbb0000008379(a5);a1=a1-a2;a6=cbb0000008380(a1);}}else s_cbb<oo2340?(a5=cbb0000002386(a1),a2=cbb0000002387(void 
a5),a5=cbb0000002388(a3),a4=cbb0000002389(!a5)):(a1=cbb0000003883(a2),a2=cbb0000003884(a1),a1=a1+a2,a1=cbb0000003885(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003886()):a4.splice(oo4806,oo4806,cbb0000003887());}a1=cbb0000003888(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}());}else s_cbb<oo2350?s_cbb<oo2351?(a1=cbb0000006149(a5),a2=cbb0000006150(a6),a1=a2!==a1,a1=cbb0000006151(a1),a1=cbb0000006152(a3),a2=cbb0000006153(a2),a1=a1<<a2,a1=cbb0000006154(a1)):(a1=cbb0000006127(a3),a2=cbb0000006128(a2),a1=a1<<a2,a1=cbb0000006129(a1),a1=cbb0000006130({})):(a1=cbb0000006656(a3),a2=cbb0000006657(a4),a1=a1%a2,a5=cbb0000006658(a1),a1=cbb0000006659(a5),a2=cbb0000006660(a6),a1=a2*a1,a7=cbb0000006661(a1));}else if(s_cbb<oo2352){s_cbb<oo2353?s_cbb<oo2354?(a1=cbb0000006296(a2),a2=cbb0000006297(a1),a1=a1/a2,a3=cbb0000006298(a1),a1=cbb0000006299(a2),a2=cbb0000006300(a1),a1=a1/a2,a3=cbb0000006301(a1)):(a1=cbb0000005924(a2),a2=cbb0000005925(a1),a1=a2==a1,a4=cbb0000005926(a1),a1=cbb0000005927(a3),a2=cbb0000005928(a4),a1=a1>>a2,a5=cbb0000005929(a1)):s_cbb<oo2355?(a1=cbb0000006167(a3),a2=cbb0000006168(a2),a1=a1<<a2,a1=cbb0000006169(a1),a5=cbb0000006170(a1),a2=cbb0000006171(void a5)):(a1=cbb0000005521(a2),a2=cbb0000005522(a3),a1=a1&a2,a3=cbb0000005523(a1),a1=cbb0000005524(a2),a2=cbb0000005525(a1),a1=a2===a1,a3=cbb0000005526(a1));}else 
if(s_cbb<oo2356){if(s_cbb<oo2357){a1=cbb0000001722();a2=cbb0000001723();a1=a2!=a1;cbb0000001724(a1);(function(){a1=shuz[start++];a3=cbb0000001725(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001726(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001727(a4);})();}else{a1=cbb0000003505(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003506(a1)):a9=oo4829;;a1=cbb0000003507(cbbb);a2=cbb0000003508(a1);a1=a1<=a2;a5=cbb0000003509(a1);}}else{a1=cbb0000005225(a6);a2=cbb0000005226(a1);a1=a1>>>a2;a2=cbb0000005227(a1);a1=cbb0000005228(a4);a2=cbb0000005229(a1);a1=a1>=a2;a2=cbb0000005230(a1);}}else s_cbb<oo2362?s_cbb<oo2363?s_cbb<oo2364?s_cbb<oo2365?(a1=cbb0000007034(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a1=cbb0000007035(a2),a2=cbb0000007036(a1),a1=a1+a2,a1=cbb0000007037(a1)):(a2=cbb0000004059(allthis),a2=cbb0000004060(allthis)):s_cbb<oo2367?(a1=cbb0000003646(a4),a2=cbb0000003647(a5),a1=a1-a2,a6=cbb0000003648(a1),a5=cbb0000003649(a2),a2=cbb0000003650(-a5)):(a1=cbb0000007583(a3),a2=cbb0000007584(a4),a1=a1>>a2,a5=cbb0000007585(a1),a1=cbb0000007586(a2),a2=cbb0000007587(a1),a1=a1+a2,a1=cbb0000007588(a1)):s_cbb<oo2368?s_cbb<oo2369?(a1=cbb0000001693(a3),a2=cbb0000001694(a4),a1=a1>>a2,a5=cbb0000001695(a1),function(){a1=cbb0000001696(a2);a2=cbb0000001697(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001698(a1);}()):(a5=cbb0000002487(a1),a2=cbb0000002488(void a5),function(){a1=cbb0000002489(a2);throw a1;}()):(a1=cbb0000006161(a6),a2=cbb0000006162(a1),a1=a1>>>a2,a2=cbb0000006163(a1),a1=cbb0000006164(a3),a2=cbb0000006165(a2),a1=a1<<a2,a1=cbb0000006166(a1)):s_cbb<oo2370?s_cbb<oo2371?s_cbb<oo2372?(a5=cbb0000005203(a1),a2=cbb0000005204(void 
a5),a1=cbb0000005205(a4),a2=cbb0000005206(a1),a1=a1>=a2,a2=cbb0000005207(a1)):(a1=cbb0000007246(a2),a2=cbb0000007247(a1),a1=a1+a2,a1=cbb0000007248(a1),a1=cbb0000007249(a3),a2=cbb0000007250(a1),a1=a1<a2,a1=cbb0000007251(a1)):(a1=cbb0000006308(a2),a2=cbb0000006309(a1),a1=a1+a2,a1=cbb0000006310(a1),a1=cbb0000006311(a2),a2=cbb0000006312(a1),a1=a1/a2,a3=cbb0000006313(a1)):s_cbb<oo2373?s_cbb<oo2374?(a1=cbb0000006172(a2),a2=cbb0000006173(a1),a1=a2-a1,a1=cbb0000006174(a1),a1=cbb0000006175(a3),a2=cbb0000006176(a2),a1=a1<<a2,a1=cbb0000006177(a1)):(function(){a1=cbb0000001699(a2);a2=cbb0000001700(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001701(a1);}(),a5=cbb0000001702(a1),a2=cbb0000001703(void a5)):(function(){debugger;}(),a1=cbb0000005473(a2),a2=cbb0000005474(a3),a1=a1&a2,a3=cbb0000005475(a1));}else if(s_cbb<oo2375){if(s_cbb<oo2376){if(s_cbb<oo2377){if(s_cbb<oo2378){if(s_cbb<oo2379){if(s_cbb<oo2380){s_cbb<oo2381?(a1=cbb0000005356(cbbb),a2=cbb0000005357(a1),a1=a1<=a2,a5=cbb0000005358(a1),a5=cbb0000005359(a1),a2=cbb0000005360(void a5)):(a1=cbb0000002541(a4),a2=cbb0000002542(a1),a1=a1>=a2,a2=cbb0000002543(a1),function(){i=[];a2=cbb0000002544(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}());}else if(s_cbb<oo2382){a1=cbb0000005597({});a1=cbb0000005598(a2);a2=cbb0000005599(a3);a1=a1&a2;a3=cbb0000005600(a1);}else{a1=cbb0000001282(a2);a2=cbb0000001283(a1);a1=a1+a2;a1=cbb0000001284(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return 
a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else s_cbb<oo2389?s_cbb<oo2390?(a1=cbb0000003776(a3),a2=cbb0000003777(a4),a1=a1^a2,a5=cbb0000003778(a1),a5=cbb0000003779(a3),a1=cbb0000003780(typeof a5)):(a1=cbb0000001491(a6),a2=cbb0000001492(a7),a1=a1|a2,a8=cbb0000001493(a1),a1=shuz[start++],start+=a1):(a2=cbb0000001956([]),a1=cbb0000001957(a2),a2=cbb0000001958(a1),a1=a2===a1,a3=cbb0000001959(a1));}else s_cbb<oo2391?s_cbb<oo2392?s_cbb<oo2393?(a1=cbb000000348(a6),a2=cbb000000349(a7),a1=a1|a2,a8=cbb000000350(a1),a1=cbb000000351(a2),a2=cbb000000352(a1),a1=a2-a1,a1=cbb000000353(a1)):(function(){debugger;}(),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008214(a1)):s_cbb<oo2394?(a5=cbb0000005345(a3),a4=cbb0000005346(!a5),a1=cbb0000005347(cbbb),a2=cbb0000005348(a1),a1=a1<=a2,a5=cbb0000005349(a1)):(a5=cbb0000005220(a1),a2=cbb0000005221(void a5),a1=cbb0000005222(a4),a2=cbb0000005223(a1),a1=a1>=a2,a2=cbb0000005224(a1)):s_cbb<oo2395?s_cbb<oo2396?(a2=cbb0000004089(allthis),a1=cbb0000004090(a3),a2=cbb0000004091(a1),a1=a1<a2,a1=cbb0000004092(a1)):(a1=cbb0000005208(a4),a2=cbb0000005209(a1),a1=a1>=a2,a2=cbb0000005210(a1),a1=cbb0000005211(a2),a2=cbb0000005212(a3),a1=a1&a2,a3=cbb0000005213(a1)):(a5=cbb0000003749(a3),a1=cbb0000003750(typeof a5),a1=cbb0000003751(a5),a2=cbb0000003752(a6),a1=a2!==a1,a1=cbb0000003753(a1));}else if(s_cbb<oo2397){if(s_cbb<oo2398){if(s_cbb<oo2399){s_cbb<oo2400?(a1=shuz[start++],a2=shuz[start++],a1=new 
RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008115(a1),a1=cbb0000008116(cbbb),a2=cbb0000008117(a1),a1=a1<=a2,a5=cbb0000008118(a1)):(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004110(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004111(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004112(a4);}(),a1=cbb0000004113(a2),a2=cbb0000004114(a1),a1=a2==a1,a4=cbb0000004115(a1));}else if(s_cbb<oo2408){a1=cbb0000002170(a3);a2=cbb0000002171(a4);a3=cbb0000002172(a5);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a1=cbb0000002173(a3);a2=cbb0000002174(a4);a1=a1%a2;a5=cbb0000002175(a1);}else{a1=cbb000000986();a2=cbb000000987();a1=a2!=a1;cbb000000988(a1);return;}}else s_cbb<oo2409?s_cbb<oo2410?(a1=cbb0000003180({}),a1=cbb0000003181(a2),a2=cbb0000003182(a1),a1=a2===a1,a3=cbb0000003183(a1)):(a1=shuz[start++],a2=cbb0000008014(constantPool[a1]),a1=cbb0000008015(a2),a2=cbb0000008016(a1),a1=a2===a1,a3=cbb0000008017(a1)):(a1=cbb0000007864(a3),a2=cbb0000007865(a1),a1=a1<a2,a1=cbb0000007866(a1),a1=cbb0000007867(a4),a2=cbb0000007868(a5),a1=a1-a2,a6=cbb0000007869(a1));}else if(s_cbb<oo2411){if(s_cbb<oo2412){if(s_cbb<oo2413){a5=cbb0000005056(a3);a4=cbb0000005057(!a5);a1=cbb0000005058(a3);a2=cbb0000005059(a3);a1=a1>a2;a2=cbb0000005060(a1);}else{a1=cbb000000709(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000710(a1)):a9=oo4829;;a1=cbb000000711(a2);a2=cbb000000712(a1);a1=a1/a2;a3=cbb000000713(a1);}}else s_cbb<oo2415?(a1=cbb0000007197(a3),a2=cbb0000007198(a1),a1=a1<a2,a1=cbb0000007199(a1),a1=cbb0000007200(a6),a2=cbb0000007201(a7),a1=a1|a2,a8=cbb0000007202(a1)):(a2=cbb0000001893([]),a1=cbb0000001894(a2),a2=cbb0000001895(a1),a1=a2===a1,a3=cbb0000001896(a1));}else 
s_cbb<oo2416?s_cbb<oo2417?(a1=cbb0000006689(a5),a2=cbb0000006690(a6),a1=a2!==a1,a1=cbb0000006691(a1),a1=cbb0000006692(a3),a2=cbb0000006693(a4),a1=a1%a2,a5=cbb0000006694(a1)):(a1=cbb0000003534(a2),a2=cbb0000003535(a1),a1=a2-a1,a1=cbb0000003536(a1),a5=cbb0000003537(a2),a2=cbb0000003538(-a5)):(function(){a1=cbb0000001704(a2);a2=cbb0000001705(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001706(a1);}(),a1=cbb0000001707(a2),a2=cbb0000001708(a1),a1=a2-a1,a1=cbb0000001709(a1));}else if(s_cbb<oo2418){s_cbb<oo2419?s_cbb<oo2420?s_cbb<oo2421?s_cbb<oo2422?(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004220(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004221(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004222(a4);}(),a1=cbb0000004223(a5),a2=cbb0000004224(a6),a1=a2!==a1,a1=cbb0000004225(a1)):(a1=cbb0000003967(a3),a2=cbb0000003968(a1),a1=a1<a2,a1=cbb0000003969(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003970()):a4.splice(oo4806,oo4806,cbb0000003971());}a1=cbb0000003972(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):s_cbb<oo2439?(a1=cbb0000006605(a2),a2=cbb0000006606(a1),a1=a2==a1,a4=cbb0000006607(a1),a1=cbb0000006608(a3),a2=cbb0000006609(a4),a1=a1^a2,a5=cbb0000006610(a1)):(a1=cbb0000002196(a3),a2=cbb0000002197(a4),a1=a1%a2,a5=cbb0000002198(a1),a1=cbb0000002199(a3),a2=cbb0000002200(a4),a3=cbb0000002201(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):s_cbb<oo2440?s_cbb<oo2441?(function(){a1=cbb0000002455(a2);throw 
a1;}(),a1=cbb0000002456(a2),a2=cbb0000002457(a1),a1=a2===a1,a3=cbb0000002458(a1)):(a1=cbb0000007001(a4),a2=cbb0000007002(a5),a1=a1-a2,a6=cbb0000007003(a1),a1=cbb0000007004(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828):(a2=cbb0000001960(allthis),a2=cbb0000001961([])):s_cbb<oo2443?s_cbb<oo2444?s_cbb<oo2445?(a1=cbb0000004055(a3),a2=cbb0000004056(a4),a1=a1%a2,a5=cbb0000004057(a1),a2=cbb0000004058(allthis)):(a1=cbb0000002272(a5),a2=cbb0000002273(a6),a1=a2!==a1,a1=cbb0000002274(a1),a1=cbb0000002275(a3),a2=cbb0000002276(a4),a3=cbb0000002277(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):s_cbb<oo2446?(a1=cbb0000004782(a3),a2=cbb0000004783(a2),a1=a1<<a2,a1=cbb0000004784(a1),a1=cbb0000004785(a2),a2=cbb0000004786(a1),a1=a2==a1,a4=cbb0000004787(a1)):(a1=cbb000000128(a3),a2=cbb000000129(a1),a1=a1<a2,a1=cbb000000130(a1),a1=cbb000000131(a3),a2=cbb000000132(a1),a1=a1 in a2,a1=cbb000000133(a1)):s_cbb<oo2447?s_cbb<oo2448?(a1=shuz[start++],start+=a1,a5=cbb0000001494(a1),a2=cbb0000001495(void a5)):(a1=cbb0000001542({}),function(){a1=cbb0000001543(a2);a2=cbb0000001544(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001545(a1);}()):(a1=cbb000000543(a3),a2=cbb000000544(a4),a1=a1^a2,a5=cbb000000545(a1),a1=cbb000000546(a5),a2=cbb000000547(a3),a3=cbb000000548(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1);}else 
if(s_cbb<oo2449){if(s_cbb<oo2450){s_cbb<oo2451?s_cbb<oo2452?(a1=cbb0000001734(a6),a2=cbb0000001735(a1),a1=a1>>>a2,a2=cbb0000001736(a1),function(){a1=shuz[start++];a3=cbb0000001737(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001738(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001739(a4);}()):(j=cbb0000001157(a1),j2=cbb0000001158(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a1=cbb0000001159(a2),a2=cbb0000001160(a1),a1=a2===a1,a3=cbb0000001161(a1)):s_cbb<oo2456?(a5=cbb0000003544(a2),a2=cbb0000003545(-a5),a1=cbb0000003546(a2),a2=cbb0000003547(a1),a1=a2===a1,a3=cbb0000003548(a1)):(a1=cbb0000006091(a5),a2=cbb0000006092(a6),a1=a2*a1,a7=cbb0000006093(a1),a1=cbb0000006094(a6),a2=cbb0000006095(a7),a1=a1|a2,a8=cbb0000006096(a1));}else if(s_cbb<oo2457){if(s_cbb<oo2458){a1=cbb0000003009(a5);a2=cbb0000003010(a6);a1=a2!==a1;a1=cbb0000003011(a1);a1=cbb0000003012(a2);a2=cbb0000003013(a1);a1=a2==a1;a4=cbb0000003014(a1);}else{a5=cbb0000007118(a1);a2=cbb0000007119(void a5);a1=cbb0000007120(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}}else{a1=cbb0000006822(a3);a2=cbb0000006823(a1);a1=a1<a2;a1=cbb0000006824(a1);a1=cbb0000006825(a5);a2=cbb0000006826(a6);a1=a2*a1;a7=cbb0000006827(a1);}}else s_cbb<oo2460?s_cbb<oo2461?s_cbb<oo2462?(a1=cbb0000004(a3),a2=cbb0000005(a1),a1=a1 in 
a2,a1=cbb0000006(a1),a1=cbb0000007(a3),a2=cbb0000008(a1),a1=a1<a2,a1=cbb0000009(a1)):(a1=cbb000000342(a2),a2=cbb000000343(a1),a1=a2-a1,a1=cbb000000344(a1),a1=cbb000000345(a5),a2=cbb000000346(a6),a1=a2!==a1,a1=cbb000000347(a1)):(a1=cbb0000004093({}),a2=cbb0000004094(allthis)):s_cbb<oo2463?s_cbb<oo2464?(a1=cbb000000843(a2),a2=cbb000000844(a1),a1=a1/a2,a3=cbb000000845(a1),a1=cbb000000846(a5),a2=cbb000000847(a9),a1=a2<a1,a6=cbb000000848(a1)):(function(){a1=cbb0000001605(a2);a2=cbb0000001606(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001607(a1);}(),a1=cbb0000001608(a3),a2=cbb0000001609(a2),a1=a1<<a2,a1=cbb0000001610(a1)):(a1=cbb0000005187(a4),a2=cbb0000005188(a1),a1=a1>=a2,a2=cbb0000005189(a1),a2=cbb0000005190(allthis));}else if(s_cbb<oo2465){if(s_cbb<oo2466){if(s_cbb<oo2467){if(s_cbb<oo2468){s_cbb<oo2469?s_cbb<oo2470?(a1=cbb0000001933(),a2=cbb0000001934(),a1=a2!=a1,cbb0000001935(a1),a2=cbb0000001936([])):(a1=cbb0000007886(a3),a2=cbb0000007887(a4),a1=a1%a2,a5=cbb0000007888(a1),a1=cbb0000007889(a4),a2=cbb0000007890(a5),a1=a1-a2,a6=cbb0000007891(a1)):s_cbb<oo2471?(a1=shuz[start++],a2=cbb0000007445(a3),a3=cbb0000007446(a1),a2[constantPool[a1]]=a3,a1=cbb0000007447(a2),a2=cbb0000007448(a1),a1=a2==a1,a4=cbb0000007449(a1)):(function(){debugger;}(),a1=cbb000000475(a2),a2=cbb000000476(a1),a1=a2-a1,a1=cbb000000477(a1));}else if(s_cbb<oo2472){if(s_cbb<oo2473){a1=cbb0000007129(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a1=cbb0000007130(a2);a2=cbb0000007131(a3);a1=a1&a2;a3=cbb0000007132(a1);}else{a1=cbb000000686(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000687(a1)):a9=oo4829;;a1=cbb000000688(a6);a2=cbb000000689(a7);a1=a1|a2;a8=cbb000000690(a1);}}else{a1=cbb0000006453(a5);a2=cbb0000006454(a6);a1=a2!==a1;a1=cbb0000006455(a1);a1=cbb0000006456(a3);a2=cbb0000006457(a4);a1=a1^a2;a5=cbb0000006458(a1);}}else 
if(s_cbb<oo2476){s_cbb<oo2477?s_cbb<oo2478?(a1=cbb000000922(a2),a2=cbb000000923(a1),a1=a2-a1,a1=cbb000000924(a1),a1=cbb000000925(a5),a2=cbb000000926(a9),a1=a2<a1,a6=cbb000000927(a1)):(a1=cbb0000005367(cbbb),a2=cbb0000005368(a1),a1=a1<=a2,a5=cbb0000005369(a1),a1=cbb0000005370(a4),a2=cbb0000005371(a1),a1=a1>=a2,a2=cbb0000005372(a1)):s_cbb<oo2479?(a1=cbb000000294(),a2=cbb000000295(),a1=a2!=a1,cbb000000296(a1),a1=cbb000000297(a5),a2=cbb000000298(a9),a1=a2<a1,a6=cbb000000299(a1)):(a1=cbb0000001445(a5),a2=cbb0000001446(a9),a1=a2<a1,a6=cbb0000001447(a1),a1=cbb0000001448(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820);}else if(s_cbb<oo2481){s_cbb<oo2482?(a2=cbb000000107(allthis),a1=cbb000000108(a3),a2=cbb000000109(a1),a1=a1 in a2,a1=cbb000000110(a1)):(a1=cbb000000140(a4),a2=cbb000000141(a1),a1=a1>=a2,a2=cbb000000142(a1),a1=cbb000000143(a3),a2=cbb000000144(a1),a1=a1 in a2,a1=cbb000000145(a1));}else{a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}a1=cbb0000001233(a2);a2=cbb0000001234(a3);a1=a1&a2;a3=cbb0000001235(a1);}}else 
if(s_cbb<oo2489){if(s_cbb<oo2490){if(s_cbb<oo2491){if(s_cbb<oo2492){(function(){a1=shuz[start++];a3=cbb0000001815(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001816(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001817(a4);})();a1=cbb0000001818(a3);a2=cbb0000001819(a3);a1=a1>a2;a2=cbb0000001820(a1);}else{a1=cbb000000994(a2);a2=cbb000000995(a1);a1=a2==a1;a4=cbb000000996(a1);return;}}else if(s_cbb<oo2496){a1=cbb0000002326(a5);a2=cbb0000002327(a9);a1=a2<a1;a6=cbb0000002328(a1);a5=cbb0000002329(a3);a4=cbb0000002330(!a5);}else{a1=cbb000000769(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000770(a1)):a9=oo4829;;a1=cbb000000771(a2);a2=cbb000000772(a1);a1=a1+a2;a1=cbb000000773(a1);}}else s_cbb<oo2498?s_cbb<oo2499?(a1=cbb0000004986(a3),a2=cbb0000004987(a3),a1=a1>a2,a2=cbb0000004988(a1),a1=cbb0000004989(a3),a2=cbb0000004990(a4),a1=a1^a2,a5=cbb0000004991(a1)):(a1=cbb000000567(a5),a2=cbb000000568(a3),a3=cbb000000569(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb000000570(a5),a2=cbb000000571(a6),a1=a2!==a1,a1=cbb000000572(a1)):(a1=cbb0000005633({}),a1=cbb0000005634(a6),a2=cbb0000005635(a1),a1=a1>>>a2,a2=cbb0000005636(a1));}else 
s_cbb<oo2500?s_cbb<oo2501?s_cbb<oo2502?(function(){debugger;}(),a5=cbb0000001333(a3),a4=cbb0000001334(!a5)):(a1=cbb0000005373(a5),a2=cbb0000005374(a9),a1=a2<a1,a6=cbb0000005375(a1),a1=cbb0000005376(cbbb),a2=cbb0000005377(a1),a1=a1<=a2,a5=cbb0000005378(a1)):s_cbb<oo2503?(a2=cbb0000001917([]),a1=cbb0000001918(cbbb),a2=cbb0000001919(a1),a1=a1<=a2,a5=cbb0000001920(a1)):(a2=cbb0000001925([]),a1=cbb0000001926(a6),a2=cbb0000001927(a7),a1=a1|a2,a8=cbb0000001928(a1)):s_cbb<oo2504?s_cbb<oo2505?(a1=cbb0000002520(a5),a2=cbb0000002521(a9),a1=a2<a1,a6=cbb0000002522(a1),function(){a1=cbb0000002523(a2);throw a1;}()):(a1=cbb0000008171(a3),a2=cbb0000008172(a4),a1=a1>>a2,a5=cbb0000008173(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008174(a1)):(a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008123(a1),a1=cbb0000008124(cbbb),a2=cbb0000008125(a1),a1=a1<=a2,a5=cbb0000008126(a1));}else if(s_cbb<oo2506){if(s_cbb<oo2507){if(s_cbb<oo2508){s_cbb<oo2509?s_cbb<oo2510?(a1=cbb0000006759(a2),a2=cbb0000006760(a1),a1=a1/a2,a3=cbb0000006761(a1),a1=cbb0000006762(a3),a2=cbb0000006763(a4),a1=a1%a2,a5=cbb0000006764(a1)):(a1=cbb0000007852(a4),a2=cbb0000007853(a5),a1=a1-a2,a6=cbb0000007854(a1),a1=cbb0000007855(a2),a2=cbb0000007856(a1),a1=a2===a1,a3=cbb0000007857(a1)):s_cbb<oo2511?(a1=cbb0000007234(a5),a2=cbb0000007235(a6),a1=a2!==a1,a1=cbb0000007236(a1),a1=cbb0000007237(a3),a2=cbb0000007238(a1),a1=a1<a2,a1=cbb0000007239(a1)):(a1=cbb0000001897({}),a2=cbb0000001898([]));}else 
if(s_cbb<oo2512){if(s_cbb<oo2513){a1=cbb000000794(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000795(a1)):a9=oo4829;;a1=cbb000000796(a4);a2=cbb000000797(a5);a1=a1-a2;a6=cbb000000798(a1);}else{a1=cbb0000006278(a2);a2=cbb0000006279(a1);a1=a1/a2;a3=cbb0000006280(a1);a1=cbb0000006281(a2);a2=cbb0000006282(a1);a1=a2==a1;a4=cbb0000006283(a1);}}else{a1=cbb0000005097(a3);a2=cbb0000005098(a4);a1=a1>>a2;a5=cbb0000005099(a1);a1=cbb0000005100(a4);a2=cbb0000005101(a1);a1=a1>=a2;a2=cbb0000005102(a1);}}else if(s_cbb<oo2515){if(s_cbb<oo2516){if(s_cbb<oo2517){(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004155(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004156(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004157(a4);})();a1=cbb0000004158();a2=cbb0000004159();a1=a2!=a1;cbb0000004160(a1);}else{a1=cbb0000007091(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a5=cbb0000007092(a1);a2=cbb0000007093(void a5);}}else s_cbb<oo2526?(a1=cbb0000008127(a3),a2=cbb0000008128(a2),a1=a1<<a2,a1=cbb0000008129(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008130(a1)):(a1=cbb0000005316(cbbb),a2=cbb0000005317(a1),a1=a1<=a2,a5=cbb0000005318(a1),a1=cbb0000005319(a5),a2=cbb0000005320(a6),a1=a2*a1,a7=cbb0000005321(a1));}else s_cbb<oo2527?s_cbb<oo2528?(a1=shuz[start++],start+=a1,a1=cbb0000001518(a3),a2=cbb0000001519(a3),a1=a1>a2,a2=cbb0000001520(a1)):(a1=cbb0000002096({}),a1=cbb0000002097(a3),a2=cbb0000002098(a4),a1=a1%a2,a5=cbb0000002099(a1)):(a5=cbb0000003744(a3),a1=cbb0000003745(typeof a5),a1=cbb0000003746(a3),a2=cbb0000003747(a2),a1=a1<<a2,a1=cbb0000003748(a1));}else 
if(s_cbb<oo2529){if(s_cbb<oo2530){s_cbb<oo2531?s_cbb<oo2532?(a5=cbb0000002299(a3),a4=cbb0000002300(!a5),a1=cbb0000002301(a3),a2=cbb0000002302(a4),a1=a1>>a2,a5=cbb0000002303(a1)):(a1=shuz[start++],a2=cbb0000008085(constantPool[a1]),a1=cbb0000008086(a3),a2=cbb0000008087(a4),a1=a1%a2,a5=cbb0000008088(a1)):s_cbb<oo2533?(a1=cbb0000005142(a4),a2=cbb0000005143(a1),a1=a1>=a2,a2=cbb0000005144(a1),a5=cbb0000005145(a3),a4=cbb0000005146(!a5)):(a5=cbb000000573(a1),a2=cbb000000574(void a5),a1=cbb000000575(a5),a2=cbb000000576(a3),a3=cbb000000577(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1);}else if(s_cbb<oo2534){if(s_cbb<oo2535){a1=cbb0000007155(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a1=cbb0000007156({});}else{a1=cbb0000007801(a4);a2=cbb0000007802(a5);a1=a1-a2;a6=cbb0000007803(a1);(function(){debugger;})();}}else{a1=cbb0000007880(a4);a2=cbb0000007881(a5);a1=a1-a2;a6=cbb0000007882(a1);a1=cbb0000007883(a3);a2=cbb0000007884(a3);a1=a1>a2;a2=cbb0000007885(a1);}}else if(s_cbb<oo2537){if(s_cbb<oo2538){if(s_cbb<oo2539){a1=cbb0000001039(a6);a2=cbb0000001040(a7);a1=a1|a2;a8=cbb0000001041(a1);return;}else{a1=cbb000000732(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000733(a1)):a9=oo4829;;(function(){debugger;})();}}else{a5=cbb0000004509(a3);a1=cbb0000004510(~a5);a1=cbb0000004511(a3);a2=cbb0000004512(a3);a1=a1>a2;a2=cbb0000004513(a1);}}else s_cbb<oo2541?s_cbb<oo2542?(a2=cbb0000001979([]),a1=cbb0000001980(a6),a2=cbb0000001981(a7),a1=a1|a2,a8=cbb0000001982(a1)):(function(){debugger;}(),a5=cbb0000004480(a3),a1=cbb0000004481(~a5)):(a1=cbb0000002647(cbbb),a2=cbb0000002648(a1),a1=a1<=a2,a5=cbb0000002649(a1),a5=cbb0000002650(a1),a2=cbb0000002651(void a5));}else 
if(s_cbb<oo2543){if(s_cbb<oo2544){if(s_cbb<oo2545){if(s_cbb<oo2546){if(s_cbb<oo2547){if(s_cbb<oo2548){if(s_cbb<oo2549){if(s_cbb<oo2550){if(s_cbb<oo2551){a1=cbb0000001253(a5);a2=cbb0000001254(a9);a1=a2<a1;a6=cbb0000001255(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}else{a1=cbb0000006999(a9);a3=shuz[start++];a1[constantPool[a3]]+=oo4828;a2=cbb0000007000(allthis);}}else s_cbb<oo2559?(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003973()):a4.splice(oo4806,oo4806,cbb0000003974());}a1=cbb0000003975(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a1=cbb0000003976(a5),a2=cbb0000003977(a6),a1=a2*a1,a7=cbb0000003978(a1)):(a1=cbb0000006015(a6),a2=cbb0000006016(a7),a1=a1|a2,a8=cbb0000006017(a1),a1=cbb0000006018(),a2=cbb0000006019(),a1=a2!=a1,cbb0000006020(a1));}else 
s_cbb<oo2569?s_cbb<oo2570?(a1=cbb0000006085(cbbb),a2=cbb0000006086(a1),a1=a1<=a2,a5=cbb0000006087(a1),a1=cbb0000006088(a6),a2=cbb0000006089(a7),a1=a1|a2,a8=cbb0000006090(a1)):(a5=cbb0000002316(a3),a4=cbb0000002317(!a5),a1=cbb0000002318(a6),a2=cbb0000002319(a7),a1=a1|a2,a8=cbb0000002320(a1)):(a1=cbb000000324(),a2=cbb000000325(),a1=a2!=a1,cbb000000326(a1),a1=cbb000000327(a3),a2=cbb000000328(a4),a1=a1%a2,a5=cbb000000329(a1));}else if(s_cbb<oo2571){s_cbb<oo2572?s_cbb<oo2573?(a1=cbb0000008290(a4),a2=cbb0000008291(a5),a1=a1-a2,a6=cbb0000008292(a1),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008293(a2);cbbb[a9]=argsList[a2];}}()):(a1=cbb0000005492(a2),a2=cbb0000005493(a3),a1=a1&a2,a3=cbb0000005494(a1),a1=cbb0000005495(a2),a2=cbb0000005496(a3),a1=a1&a2,a3=cbb0000005497(a1)):s_cbb<oo2575?(a1=cbb00000060(a3),a2=cbb00000061(a1),a1=a1 in a2,a1=cbb00000062(a1),a1=cbb00000063(a3),a2=cbb00000064(a3),a1=a1>a2,a2=cbb00000065(a1)):(a1=cbb000000306(),a2=cbb000000307(),a1=a2!=a1,cbb000000308(a1),a1=cbb000000309(a6),a2=cbb000000310(a7),a1=a1|a2,a8=cbb000000311(a1));}else if(s_cbb<oo2576){if(s_cbb<oo2577){a1=cbb0000007083(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a1=cbb0000007084(a5);a2=cbb0000007085(a6);a1=a2!==a1;a1=cbb0000007086(a1);}else{a1=cbb0000002066({});a1=cbb0000002067(a3);a2=cbb0000002068(a4);a1=a1>>a2;a5=cbb0000002069(a1);}}else{a2=cbb0000004061(allthis);a1=cbb0000004062(a2);a2=cbb0000004063(a1);a1=a1/a2;a3=cbb0000004064(a1);}}else 
if(s_cbb<oo2579){s_cbb<oo2580?s_cbb<oo2581?s_cbb<oo2582?(a1=cbb000000969(cbbb),a2=cbb000000970(a1),a1=a1<=a2,a5=cbb000000971(a1),a1=cbb000000972(a5),a2=cbb000000973(a9),a1=a2<a1,a6=cbb000000974(a1)):(a1=cbb0000008252(a5),a2=cbb0000008253(a9),a1=a2<a1,a6=cbb0000008254(a1),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008255(a2);cbbb[a9]=argsList[a2];}}()):s_cbb<oo2584?(a1=cbb0000005737(a2),a2=cbb0000005738(a3),a1=a1&a2,a3=cbb0000005739(a1),a1=cbb0000005740(a6),a2=cbb0000005741(a1),a1=a1>>>a2,a2=cbb0000005742(a1)):(a1=cbb0000005637(a5),a2=cbb0000005638(a6),a1=a2*a1,a7=cbb0000005639(a1),a1=cbb0000005640(a6),a2=cbb0000005641(a1),a1=a1>>>a2,a2=cbb0000005642(a1)):s_cbb<oo2585?s_cbb<oo2586?(a1=cbb0000008159(a3),a2=cbb0000008160(a4),a1=a1^a2,a5=cbb0000008161(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008162(a1)):(a1=cbb0000005743(a6),a2=cbb0000005744(a1),a1=a1>>>a2,a2=cbb0000005745(a1),a1=cbb0000005746(a3),a2=cbb0000005747(a4),a1=a1>>a2,a5=cbb0000005748(a1)):(a1=cbb0000006959(a5),a2=cbb0000006960(a6),a1=a2*a1,a7=cbb0000006961(a1),a1=cbb0000006962(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828);}else if(s_cbb<oo2588){if(s_cbb<oo2589){if(s_cbb<oo2590){a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008447(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return 
undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008448(a6);a2=cbb0000008449(a1);a1=a1>>>a2;a2=cbb0000008450(a1);}else{a1=cbb0000006021(a4);a2=cbb0000006022(a5);a1=a1-a2;a6=cbb0000006023(a1);a1=cbb0000006024(a6);a2=cbb0000006025(a7);a1=a1|a2;a8=cbb0000006026(a1);}}else if(s_cbb<oo2602){a1=cbb0000004592(a3);a2=cbb0000004593(a1);a2.push(a1);a1=cbb0000004594(a2);a1=cbb0000004595(a3);a2=cbb0000004596(a2);a1=a1<<a2;a1=cbb0000004597(a1);}else{a1=cbb0000003474(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003475(a1)):a9=oo4829;;a1=cbb0000003476(a5);a2=cbb0000003477(a9);a1=a2<a1;a6=cbb0000003478(a1);}}else s_cbb<oo2604?s_cbb<oo2605?(function(){a1=cbb0000002441(a2);throw a1;}(),a1=cbb0000002442(a5),a2=cbb0000002443(a9),a1=a2<a1,a6=cbb0000002444(a1)):(a1=cbb000000445(a2),a2=cbb000000446(a1),a1=a1/a2,a3=cbb000000447(a1),a1=cbb000000448(a2),a2=cbb000000449(a1),a1=a2-a1,a1=cbb000000450(a1)):(a1=cbb0000004938(a3),a2=cbb0000004939(a3),a1=a1>a2,a2=cbb0000004940(a1),a1=cbb0000004941(a2),a2=cbb0000004942(a1),a1=a1/a2,a3=cbb0000004943(a1));}else if(s_cbb<oo2606){if(s_cbb<oo2607){if(s_cbb<oo2608){if(s_cbb<oo2609){s_cbb<oo2610?(a1=cbb0000002152(a3),a2=cbb0000002153(a4),a3=cbb0000002154(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002155(a2),a2=cbb0000002156(a1),a1=a1/a2,a3=cbb0000002157(a1)):(a1=cbb0000004421(a2),a2=cbb0000004422(a3),a3=cbb0000004423(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004424(a3),a2=cbb0000004425([]));}else 
if(s_cbb<oo2611){a1=cbb0000003021(a5);a2=cbb0000003022(a6);a1=a2!==a1;a1=cbb0000003023(a1);a1=cbb0000003024(a5);a2=cbb0000003025(a6);a1=a2*a1;a7=cbb0000003026(a1);}else{a1=cbb0000001261(a6);a2=cbb0000001262(a1);a1=a1>>>a2;a2=cbb0000001263(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else s_cbb<oo2618?s_cbb<oo2619?(all=cbbb,a1=cbb0000007357(cbbb),a1=cbb0000007358(a4),a2=cbb0000007359(a5),a1=a1-a2,a6=cbb0000007360(a1)):(a1=cbb0000005109(a4),a2=cbb0000005110(a1),a1=a1>=a2,a2=cbb0000005111(a1),a1=cbb0000005112(a4),a2=cbb0000005113(a1),a1=a1>=a2,a2=cbb0000005114(a1)):(a1=cbb0000001515(a4),a2=cbb0000001516(a5),a1=a1-a2,a6=cbb0000001517(a1),a1=shuz[start++],start+=a1);}else 
s_cbb<oo2620?s_cbb<oo2621?s_cbb<oo2622?(a1=cbb0000006131(a3),a2=cbb0000006132(a2),a1=a1<<a2,a1=cbb0000006133(a1),a1=cbb0000006134(a2),a2=cbb0000006135(a1),a1=a1/a2,a3=cbb0000006136(a1)):(function(){a1=shuz[start++];a3=cbb0000001869(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001870(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001871(a4);}(),a2=cbb0000001872(allthis)):s_cbb<oo2626?(a1=cbb0000002919(cbbb),a2=cbb0000002920(a1),a1=a1<=a2,a5=cbb0000002921(a1),a1=cbb0000002922(a3),a2=cbb0000002923(a1),a3=delete a2[a1],a1=cbb0000002924(a3)):(a1=cbb000000483(a2),a2=cbb000000484(a1),a1=a2-a1,a1=cbb000000485(a1),a1=cbb000000486(a2),a2=cbb000000487(a1),a1=a2-a1,a1=cbb000000488(a1)):s_cbb<oo2627?s_cbb<oo2628?(a1=cbb0000003584(a3),a2=cbb0000003585(a4),a1=a1%a2,a5=cbb0000003586(a1),a5=cbb0000003587(a2),a2=cbb0000003588(-a5)):(a1=cbb0000003039(a5),a2=cbb0000003040(a6),a1=a2!==a1,a1=cbb0000003041(a1),a1=cbb0000003042(a6),a2=cbb0000003043(a7),a1=a1|a2,a8=cbb0000003044(a1)):(a1=cbb000000170(a3),a2=cbb000000171(a1),a1=a1 in a2,a1=cbb000000172(a1),a1=cbb000000173(a6),a2=cbb000000174(a7),a1=a1|a2,a8=cbb000000175(a1));}else if(s_cbb<oo2629){if(s_cbb<oo2630){if(s_cbb<oo2631){s_cbb<oo2632?(a1=cbb0000005191(a4),a2=cbb0000005192(a1),a1=a1>=a2,a2=cbb0000005193(a1),a1=cbb0000005194(a6),a2=cbb0000005195(a7),a1=a1|a2,a8=cbb0000005196(a1)):(a1=cbb0000001097(a3),a2=cbb0000001098(a3),a1=a1>a2,a2=cbb0000001099(a1),j=cbb0000001100(a1),j2=cbb0000001101(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1);}else if(s_cbb<oo2633){a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008399(a1);let g=a7;all[g]=function(){let g2=new 
cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008400(a3);a2=cbb0000008401(a1);a1=a1<a2;a1=cbb0000008402(a1);}else{a5=cbb0000006880(a3);a4=cbb0000006881(!a5);a1=cbb0000006882(a5);a2=cbb0000006883(a6);a1=a2*a1;a7=cbb0000006884(a1);}}else s_cbb<oo2645?s_cbb<oo2646?(a1=cbb0000001611(a2),a2=cbb0000001612(a1),a1=a1/a2,a3=cbb0000001613(a1),function(){a1=cbb0000001614(a2);a2=cbb0000001615(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001616(a1);}()):(a5=cbb0000003831(a3),a4=cbb0000003832(!a5),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003833()):a4.splice(oo4806,oo4806,cbb0000003834());}a1=cbb0000003835(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):(a1=cbb0000006109(a3),a2=cbb0000006110(a2),a1=a1<<a2,a1=cbb0000006111(a1),a1=cbb0000006112(a2),a2=cbb0000006113(a3),a1=a1&a2,a3=cbb0000006114(a1));}else 
s_cbb<oo2656?s_cbb<oo2657?s_cbb<oo2658?(a1=cbb0000008053(),a2=cbb0000008054(),a1=a2!=a1,cbb0000008055(a1),a1=shuz[start++],a2=cbb0000008056(constantPool[a1])):(a1=cbb0000004208(a5),a2=cbb0000004209(a6),a1=a2*a1,a7=cbb0000004210(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004211(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004212(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004213(a4);}()):s_cbb<oo2666?(a2=cbb000000626([]),a1=cbb000000627(a5),a2=cbb000000628(a3),a3=cbb000000629(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(a1=cbb0000006948({}),a1=cbb0000006949(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828):s_cbb<oo2668?s_cbb<oo2669?(a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008154(a1),a5=cbb0000008155(a1),a2=cbb0000008156(void a5)):(a1=cbb0000001776(a4),a2=cbb0000001777(a5),a1=a1-a2,a6=cbb0000001778(a1),function(){a1=shuz[start++];a3=cbb0000001779(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001780(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001781(a4);}()):(a1=cbb0000007038(a6),a2=cbb0000007039(a7),a1=a1|a2,a8=cbb0000007040(a1),a1=cbb0000007041(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828);}else if(s_cbb<oo2674){if(s_cbb<oo2675){if(s_cbb<oo2676){if(s_cbb<oo2677){if(s_cbb<oo2678){s_cbb<oo2679?(a1=cbb0000001377(a3),a2=cbb0000001378(a1),a1=a1<a2,a1=cbb0000001379(a1),a1=cbb0000001380(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820):(a1=cbb0000008146(a3),a2=cbb0000008147(a4),a1=a1>>a2,a5=cbb0000008148(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008149(a1));}else 
if(s_cbb<oo2681){a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008349(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008350(a3);a2=cbb0000008351(a3);a1=a1>a2;a2=cbb0000008352(a1);}else{a1=cbb0000001250(a3);a2=cbb0000001251(a3);a1=a1>a2;a2=cbb0000001252(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else 
s_cbb<oo2699?s_cbb<oo2700?(a1=cbb0000004242(cbbb),a2=cbb0000004243(a1),a1=a1<=a2,a5=cbb0000004244(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004245(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004246(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004247(a4);}()):(a1=cbb0000003367(a2),a2=cbb0000003368(a1),a1=a2===a1,a3=cbb0000003369(a1),a1=cbb0000003370(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):(a1=cbb0000007908(a2),a2=cbb0000007909(a1),a1=a2==a1,a4=cbb0000007910(a1),a1=shuz[start++],a2=cbb0000007911(a1));}else s_cbb<oo2709?s_cbb<oo2710?s_cbb<oo2711?(a1=cbb0000003152(a6),a2=cbb0000003153(a7),a1=a1|a2,a8=cbb0000003154(a1),a1=cbb0000003155(a2),a2=cbb0000003156(a1),a1=a2===a1,a3=cbb0000003157(a1)):(a1=cbb0000003698(a3),a2=cbb0000003699(a4),a1=a1%a2,a5=cbb0000003700(a1),a5=cbb0000003701(a3),a1=cbb0000003702(typeof a5)):s_cbb<oo2712?(a1=shuz[start++],a2=cbb0000008041(constantPool[a1]),a1=cbb0000008042(a2),a2=cbb0000008043(a1),a1=a2==a1,a4=cbb0000008044(a1)):(a5=cbb0000007371(a1),a2=cbb0000007372(void a5),all=cbbb,a1=cbb0000007373(cbbb)):s_cbb<oo2713?s_cbb<oo2714?(a1=cbb0000006115(a3),a2=cbb0000006116(a2),a1=a1<<a2,a1=cbb0000006117(a1),a1=cbb0000006118(a3),a2=cbb0000006119(a2),a1=a1<<a2,a1=cbb0000006120(a1)):(a1=cbb0000006942(a5),a2=cbb0000006943(a6),a1=a2*a1,a7=cbb0000006944(a1),a1=cbb0000006945(cbbb),a2=cbb0000006946(a1),a1=a1<=a2,a5=cbb0000006947(a1)):(a1=cbb0000007533(a3),a2=cbb0000007534(a4),a1=a1>>a2,a5=cbb0000007535(a1),a1=shuz[start++],a2=cbb0000007536(a3),a3=cbb0000007537(a1),a2[constantPool[a1]]=a3);}else 
if(s_cbb<oo2715){s_cbb<oo2716?s_cbb<oo2717?s_cbb<oo2718?(a1=cbb0000006468(a3),a2=cbb0000006469(a4),a1=a1^a2,a5=cbb0000006470(a1),a1=cbb0000006471(a3),a2=cbb0000006472(a3),a1=a1>a2,a2=cbb0000006473(a1)):(a1=cbb0000005165(a4),a2=cbb0000005166(a1),a1=a1>=a2,a2=cbb0000005167(a1),a1=cbb0000005168(a3),a2=cbb0000005169(a4),a1=a1%a2,a5=cbb0000005170(a1)):s_cbb<oo2719?(a1=cbb0000005882(a3),a2=cbb0000005883(a4),a1=a1>>a2,a5=cbb0000005884(a1),a1=cbb0000005885(a4),a2=cbb0000005886(a1),a1=a1>=a2,a2=cbb0000005887(a1)):(a1=cbb000000469(a2),a2=cbb000000470(a1),a1=a2-a1,a1=cbb000000471(a1),a1=cbb000000472(a4),a2=cbb000000473(a5),a1=a1-a2,a6=cbb000000474(a1)):s_cbb<oo2720?s_cbb<oo2721?(a1=cbb0000006870(a5),a2=cbb0000006871(a6),a1=a2*a1,a7=cbb0000006872(a1),a1=cbb0000006873({})):(a1=shuz[start++],a2=cbb0000007961(a1),a1=cbb0000007962(a2),a2=cbb0000007963(a1),a1=a2==a1,a4=cbb0000007964(a1)):(a1=cbb0000008182(a2),a2=cbb0000008183(a1),a1=a2-a1,a1=cbb0000008184(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008185(a1));}else if(s_cbb<oo2722){if(s_cbb<oo2723){s_cbb<oo2724?(a1=cbb000000401(a2),a2=cbb000000402(a1),a1=a2-a1,a1=cbb000000403(a1),a1=cbb000000404(a2),a2=cbb000000405(a1),a1=a2==a1,a4=cbb000000406(a1)):(a2=cbb0000001987([]),a1=cbb0000001988(a3),a2=cbb0000001989(a4),a1=a1%a2,a5=cbb0000001990(a1));}else if(s_cbb<oo2725){a1=cbb0000004598(a3);a2=cbb0000004599(a1);a2.push(a1);a1=cbb0000004600(a2);a1=cbb0000004601(a3);a2=cbb0000004602(a3);a1=a1>a2;a2=cbb0000004603(a1);}else{a1=cbb0000007057(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a5=cbb0000007058(a3);a4=cbb0000007059(!a5);}}else 
if(s_cbb<oo2727){s_cbb<oo2728?(function(){debugger;}(),a1=cbb0000007013(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828):(a2=cbb0000004910([]),a1=cbb0000004911(a2),a2=cbb0000004912(a1),a1=a2==a1,a4=cbb0000004913(a1));}else{a1=cbb0000003479(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003480(a1)):a9=oo4829;;a1=cbb0000003481(a6);a2=cbb0000003482(a1);a1=a1>>>a2;a2=cbb0000003483(a1);}}else if(s_cbb<oo2731){if(s_cbb<oo2732){if(s_cbb<oo2733){if(s_cbb<oo2734){s_cbb<oo2735?(function(){a1=cbb0000002498(a2);throw a1;}(),a1=cbb0000002499(a2),a2=cbb0000002500(a1),a1=a1/a2,a3=cbb0000002501(a1)):(a1=cbb0000007240(a3),a2=cbb0000007241(a1),a1=a1<a2,a1=cbb0000007242(a1),a1=cbb0000007243(a4),a2=cbb0000007244(a1),a1=a1>=a2,a2=cbb0000007245(a1));}else if(s_cbb<oo2736){a1=cbb0000001221(a4);a2=cbb0000001222(a5);a1=a1-a2;a6=cbb0000001223(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}else{a5=cbb0000003594(a2);a2=cbb0000003595(-a5);a1=cbb0000003596(a6);a2=cbb0000003597(a1);a1=a1>>>a2;a2=cbb0000003598(a1);}}else if(s_cbb<oo2743){if(s_cbb<oo2744){a1=cbb0000007071(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a5=cbb0000007072(a1);a2=cbb0000007073(void 
a5);}else{a1=cbb0000004045(a2);a2=cbb0000004046(a1);a1=a2===a1;a3=cbb0000004047(a1);a2=cbb0000004048(allthis);}}else{a5=cbb0000005986(a1);a2=cbb0000005987(void a5);a1=cbb0000005988(a6);a2=cbb0000005989(a7);a1=a1|a2;a8=cbb0000005990(a1);}}else if(s_cbb<oo2746){if(s_cbb<oo2747){if(s_cbb<oo2748){a1=cbb0000003412(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003413(a1)):a9=oo4829;;a1=cbb0000003414(a2);a2=cbb0000003415(a1);a1=a2==a1;a4=cbb0000003416(a1);}else{a1=cbb0000001428(a2);a2=cbb0000001429(a1);a1=a2==a1;a4=cbb0000001430(a1);a1=cbb0000001431(a2);a3=shuz[start++];a1?start+=a3:a9=oo4820;}}else s_cbb<oo2751?(a1=cbb000000478(a2),a2=cbb000000479(a1),a1=a2-a1,a1=cbb000000480(a1),a5=cbb000000481(a1),a2=cbb000000482(void a5)):(a1=cbb0000004806(a2),a2=cbb0000004807(a1),a1=a1+a2,a1=cbb0000004808(a1),a1=cbb0000004809(a2),a2=cbb0000004810(a1),a1=a2==a1,a4=cbb0000004811(a1));}else if(s_cbb<oo2752){if(s_cbb<oo2753){a1=cbb0000002266(a3);a2=cbb0000002267(a4);a3=cbb0000002268(a5);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a1=cbb0000002269(a4);a2=cbb0000002270(a1);a1=a1>=a2;a2=cbb0000002271(a1);}else{a1=cbb000000675(a5);a2=cbb000000676(a9);a1=a2<a1;a6=cbb000000677(a1);a1=cbb000000678(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000679(a1)):a9=oo4829;;}}else{a1=cbb0000006874(a3);a2=cbb0000006875(a4);a1=a1>>a2;a5=cbb0000006876(a1);a1=cbb0000006877(a5);a2=cbb0000006878(a6);a1=a2*a1;a7=cbb0000006879(a1);}}else if(s_cbb<oo2755){s_cbb<oo2756?s_cbb<oo2757?s_cbb<oo2758?(a1=cbb000000360(a2),a2=cbb000000361(a1),a1=a2-a1,a1=cbb000000362(a1),a5=cbb000000363(a1),a2=cbb000000364(void a5)):(a1=cbb000000164(a3),a2=cbb000000165(a1),a1=a1 in a2,a1=cbb000000166(a1),a1=cbb000000167(a3),a2=cbb000000168(a2),a1=a1<<a2,a1=cbb000000169(a1)):s_cbb<oo2759?(a1=cbb0000002857(a3),a2=cbb0000002858(a1),a3=delete 
a2[a1],a1=cbb0000002859(a3),a1=cbb0000002860(a6),a2=cbb0000002861(a1),a1=a1>>>a2,a2=cbb0000002862(a1)):(a1=cbb0000006354(a2),a2=cbb0000006355(a1),a1=a1/a2,a3=cbb0000006356(a1),a1=cbb0000006357(a3),a2=cbb0000006358(a2),a1=a1<<a2,a1=cbb0000006359(a1)):s_cbb<oo2760?s_cbb<oo2761?(a1=cbb000000371(a2),a2=cbb000000372(a1),a1=a2-a1,a1=cbb000000373(a1),a1=cbb000000374(a2),a2=cbb000000375(a1),a1=a2===a1,a3=cbb000000376(a1)):(a1=cbb0000007287(a6),a2=cbb0000007288(a7),a1=a1|a2,a8=cbb0000007289(a1),a1=cbb0000007290(a3),a2=cbb0000007291(a1),a1=a1<a2,a1=cbb0000007292(a1)):(all=cbbb,a1=cbb0000007374(cbbb),a1=cbb0000007375(a3),a2=cbb0000007376(a4),a1=a1%a2,a5=cbb0000007377(a1));}else if(s_cbb<oo2762){if(s_cbb<oo2763){if(s_cbb<oo2764){a1=cbb0000002549({});(function(){i=[];a2=cbb0000002550(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;})();}else{a1=cbb0000001033(a5);a2=cbb0000001034(a9);a1=a2<a1;a6=cbb0000001035(a1);return;}}else{a1=cbb0000006214(a5);a2=cbb0000006215(a9);a1=a2<a1;a6=cbb0000006216(a1);a1=cbb0000006217(a3);a2=cbb0000006218(a2);a1=a1<<a2;a1=cbb0000006219(a1);}}else if(s_cbb<oo2765){if(s_cbb<oo2766){a1=cbb0000007005(a3);a2=cbb0000007006(a1);a1=a1<a2;a1=cbb0000007007(a1);a1=cbb0000007008(a9);a3=shuz[start++];a1[constantPool[a3]]+=oo4828;}else{a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008385(a1);let g=a7;all[g]=function(){let g2=new 
cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008386(a5);a2=cbb0000008387(a6);a1=a2*a1;a7=cbb0000008388(a1);}}else{a2=cbb0000006065(allthis);a1=cbb0000006066(a6);a2=cbb0000006067(a7);a1=a1|a2;a8=cbb0000006068(a1);}}else if(s_cbb<oo2779){if(s_cbb<oo2780){if(s_cbb<oo2781){if(s_cbb<oo2782){if(s_cbb<oo2783){if(s_cbb<oo2784){if(s_cbb<oo2785){a1=cbb0000007458(a5);a2=cbb0000007459(a9);a1=a2<a1;a6=cbb0000007460(a1);a1=shuz[start++];a2=cbb0000007461(a3);a3=cbb0000007462(a1);a2[constantPool[a1]]=a3;}else{a1=cbb0000008427(a5);a2=cbb0000008428(a6);a1=a2*a1;a7=cbb0000008429(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008430(a1);let g=a7;all[g]=function(){let g2=new 
cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else s_cbb<oo2797?(a1=cbb0000006668(a4),a2=cbb0000006669(a1),a1=a1>=a2,a2=cbb0000006670(a1),a1=cbb0000006671(a3),a2=cbb0000006672(a4),a1=a1%a2,a5=cbb0000006673(a1)):(a1=cbb0000004901(a2),a2=cbb0000004902(a1),a1=a2==a1,a4=cbb0000004903(a1),a5=cbb0000004904(a1),a2=cbb0000004905(void a5));}else s_cbb<oo2798?s_cbb<oo2799?(a1=cbb0000004364(a2),a2=cbb0000004365(a3),a3=cbb0000004366(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004367(a3),a1=cbb0000004368(a2),a2=cbb0000004369(a1),a1=a1+a2,a1=cbb0000004370(a1)):(a1=cbb0000001294(a2),a2=cbb0000001295(a1),a1=a2-a1,a1=cbb0000001296(a1),function(){debugger;}()):(a1=cbb0000002248(a3),a2=cbb0000002249(a4),a3=cbb0000002250(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002251(a3),a2=cbb0000002252(a3),a1=a1>a2,a2=cbb0000002253(a1));}else 
if(s_cbb<oo2800){s_cbb<oo2801?s_cbb<oo2802?(a5=cbb0000004503(a3),a1=cbb0000004504(~a5),function(){debugger;}()):(a1=cbb0000004081(a3),a2=cbb0000004082(a4),a1=a1>>a2,a5=cbb0000004083(a1),a2=cbb0000004084(allthis)):s_cbb<oo2803?(a1=cbb0000005509(a4),a2=cbb0000005510(a1),a1=a1>=a2,a2=cbb0000005511(a1),a1=cbb0000005512(a2),a2=cbb0000005513(a3),a1=a1&a2,a3=cbb0000005514(a1)):(a1=cbb0000007416(a5),a2=cbb0000007417(a6),a1=a2*a1,a7=cbb0000007418(a1),all=cbbb,a1=cbb0000007419(cbbb));}else if(s_cbb<oo2804){s_cbb<oo2805?(a1=cbb0000002100(a4),a2=cbb0000002101(a5),a1=a1-a2,a6=cbb0000002102(a1),a1=cbb0000002103({})):(a1=cbb0000007553(a2),a2=cbb0000007554(a1),a1=a2-a1,a1=cbb0000007555(a1),a1=shuz[start++],a2=cbb0000007556(a3),a3=cbb0000007557(a1),a2[constantPool[a1]]=a3);}else{a2=cbb000000984(allthis);return;}}else if(s_cbb<oo2806){if(s_cbb<oo2807){if(s_cbb<oo2808){s_cbb<oo2809?(a1=cbb0000006552(a5),a2=cbb0000006553(a6),a1=a2!==a1,a1=cbb0000006554(a1),a1=cbb0000006555(a3),a2=cbb0000006556(a4),a1=a1^a2,a5=cbb0000006557(a1)):(a1=cbb0000005852(a3),a2=cbb0000005853(a4),a1=a1>>a2,a5=cbb0000005854(a1),a1=cbb0000005855(a2),a2=cbb0000005856(a1),a1=a2==a1,a4=cbb0000005857(a1));}else if(s_cbb<oo2810){a1=cbb000000714(a2);a2=cbb000000715(a1);a1=a2===a1;a3=cbb000000716(a1);a1=cbb000000717(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000718(a1)):a9=oo4829;;}else{a1=cbb0000005486(a2);a2=cbb0000005487(a3);a1=a1&a2;a3=cbb0000005488(a1);a1=cbb0000005489(a2);a2=cbb0000005490(a1);a1=a2===a1;a3=cbb0000005491(a1);}}else s_cbb<oo2812?s_cbb<oo2813?(a1=cbb0000006220(a2),a2=cbb0000006221(a3),a1=a1&a2,a3=cbb0000006222(a1),a1=cbb0000006223(a3),a2=cbb0000006224(a2),a1=a1<<a2,a1=cbb0000006225(a1)):(a5=cbb0000005462(a1),a2=cbb0000005463(void a5),a1=cbb0000005464(a2),a2=cbb0000005465(a3),a1=a1&a2,a3=cbb0000005466(a1)):(a1=cbb0000004314(a2),a2=cbb0000004315(a3),a3=cbb0000004316(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004317(a3),a5=cbb0000004318(a1),a2=cbb0000004319(void a5));}else 
s_cbb<oo2814?s_cbb<oo2815?s_cbb<oo2816?(a1=cbb0000005004(a3),a2=cbb0000005005(a3),a1=a1>a2,a2=cbb0000005006(a1),function(){debugger;}()):(a1=cbb0000006483(a3),a2=cbb0000006484(a4),a1=a1^a2,a5=cbb0000006485(a1),a1=cbb0000006486(a6),a2=cbb0000006487(a7),a1=a1|a2,a8=cbb0000006488(a1)):s_cbb<oo2817?(a1=cbb0000007293(a3),a2=cbb0000007294(a1),a1=a1<a2,a1=cbb0000007295(a1),a1=cbb0000007296(a3),a2=cbb0000007297(a3),a1=a1>a2,a2=cbb0000007298(a1)):(j=cbb0000001102(a1),j2=cbb0000001103(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a5=cbb0000001104(a1),a2=cbb0000001105(void a5)):s_cbb<oo2818?s_cbb<oo2819?(function(){i=[];a2=cbb0000002533(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002534(a3),a2=cbb0000002535(a4),a1=a1>>a2,a5=cbb0000002536(a1)):(a1=cbb0000001402(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,function(){debugger;}()):(a1=cbb0000006800(a5),a2=cbb0000006801(a6),a1=a2*a1,a7=cbb0000006802(a1),a1=cbb0000006803(a3),a2=cbb0000006804(a2),a1=a1<<a2,a1=cbb0000006805(a1));}else 
if(s_cbb<oo2821){if(s_cbb<oo2822){s_cbb<oo2823?s_cbb<oo2824?s_cbb<oo2825?(a1=cbb0000001851(cbbb),a2=cbb0000001852(a1),a1=a1<=a2,a5=cbb0000001853(a1),function(){a1=shuz[start++];a3=cbb0000001854(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001855(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001856(a4);}()):(a1=cbb0000007768(a3),a2=cbb0000007769(a2),a1=a1<<a2,a1=cbb0000007770(a1),a1=cbb0000007771(a4),a2=cbb0000007772(a5),a1=a1-a2,a6=cbb0000007773(a1)):s_cbb<oo2829?(a1=cbb0000004287(a6),a2=cbb0000004288(a7),a1=a1|a2,a8=cbb0000004289(a1),a1=cbb0000004290(a2),a2=cbb0000004291(a3),a3=cbb0000004292(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004293(a3)):(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004125(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004126(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004127(a4);}(),a1=cbb0000004128(a3),a2=cbb0000004129(a1),a1=a1<a2,a1=cbb0000004130(a1)):s_cbb<oo2837?s_cbb<oo2838?(a1=cbb0000001863(a5),a2=cbb0000001864(a6),a1=a2*a1,a7=cbb0000001865(a1),function(){a1=shuz[start++];a3=cbb0000001866(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001867(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001868(a4);}()):(a1=cbb0000002818(a3),a2=cbb0000002819(a2),a1=a1<<a2,a1=cbb0000002820(a1),a1=cbb0000002821(a3),a2=cbb0000002822(a1),a3=delete 
a2[a1],a1=cbb0000002823(a3)):(a1=cbb000000260(),a2=cbb000000261(),a1=a2!=a1,cbb000000262(a1),a1=cbb000000263(a3),a2=cbb000000264(a4),a1=a1%a2,a5=cbb000000265(a1));}else if(s_cbb<oo2842){if(s_cbb<oo2843){if(s_cbb<oo2844){a1=cbb0000007157(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a1=cbb0000007158(a6);a2=cbb0000007159(a1);a1=a1>>>a2;a2=cbb0000007160(a1);}else{all=cbbb;a1=cbb0000007384(cbbb);a1=cbb0000007385(a6);a2=cbb0000007386(a1);a1=a1>>>a2;a2=cbb0000007387(a1);}}else s_cbb<oo2846?(function(){a1=shuz[start++];a3=cbb0000001827(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001828(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001829(a4);}(),a1=cbb0000001830(a2),a2=cbb0000001831(a1),a1=a1/a2,a3=cbb0000001832(a1)):(a1=cbb0000006546(a3),a2=cbb0000006547(a4),a1=a1^a2,a5=cbb0000006548(a1),a1=cbb0000006549(cbbb),a2=cbb0000006550(a1),a1=a1<=a2,a5=cbb0000006551(a1));}else s_cbb<oo2850?s_cbb<oo2851?(a5=cbb0000001322(a1),a2=cbb0000001323(void a5),function(){debugger;}()):(a1=cbb0000005426(a6),a2=cbb0000005427(a1),a1=a1>>>a2,a2=cbb0000005428(a1),a1=cbb0000005429(cbbb),a2=cbb0000005430(a1),a1=a1<=a2,a5=cbb0000005431(a1)):(a2=cbb0000003076([]),a1=cbb0000003077(a5),a2=cbb0000003078(a6),a1=a2!==a1,a1=cbb0000003079(a1));}else 
if(s_cbb<oo2852){s_cbb<oo2853?s_cbb<oo2854?s_cbb<oo2855?(a1=cbb0000004800(a2),a2=cbb0000004801(a1),a1=a2===a1,a3=cbb0000004802(a1),a1=cbb0000004803(a2),a2=cbb0000004804(a1),a1=a2==a1,a4=cbb0000004805(a1)):(a1=cbb0000003084(a5),a2=cbb0000003085(a6),a1=a2!==a1,a1=cbb0000003086(a1),a1=cbb0000003087(a5),a2=cbb0000003088(a9),a1=a2<a1,a6=cbb0000003089(a1)):s_cbb<oo2856?(a1=cbb0000005171({}),a1=cbb0000005172(a4),a2=cbb0000005173(a1),a1=a1>=a2,a2=cbb0000005174(a1)):(a1=cbb0000003374(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a1=cbb0000003375(a2),a2=cbb0000003376(a1),a1=a2===a1,a3=cbb0000003377(a1)):s_cbb<oo2858?s_cbb<oo2859?(a5=cbb0000004498(a3),a1=cbb0000004499(~a5),a1=cbb0000004500(a3),a2=cbb0000004501(a3),a1=a1>a2,a2=cbb0000004502(a1)):(a1=cbb000000122(a3),a2=cbb000000123(a1),a1=a1 in a2,a1=cbb000000124(a1),a1=cbb000000125(a3),a2=cbb000000126(a4),a1=a1^a2,a5=cbb000000127(a1)):(a1=cbb0000004587(cbbb),a2=cbb0000004588(a1),a1=a1<=a2,a5=cbb0000004589(a1),a5=cbb0000004590(a3),a1=cbb0000004591(~a5));}else if(s_cbb<oo2860){s_cbb<oo2861?s_cbb<oo2862?(a1=cbb0000006908(a5),a2=cbb0000006909(a6),a1=a2*a1,a7=cbb0000006910(a1),a1=cbb0000006911(a3),a2=cbb0000006912(a2),a1=a1<<a2,a1=cbb0000006913(a1)):(a1=cbb0000006477(a3),a2=cbb0000006478(a4),a1=a1^a2,a5=cbb0000006479(a1),a1=cbb0000006480(a3),a2=cbb0000006481(a4),a1=a1^a2,a5=cbb0000006482(a1)):(a5=cbb0000003693(a3),a1=cbb0000003694(typeof a5),a1=cbb0000003695(a5),a2=cbb0000003696(a6),a1=a2!==a1,a1=cbb0000003697(a1));}else if(s_cbb<oo2863){s_cbb<oo2864?(a1=cbb0000001369(a5),a2=cbb0000001370(a6),a1=a2!==a1,a1=cbb0000001371(a1),a1=cbb0000001372(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820):(a1=cbb0000002891(a5),a2=cbb0000002892(a6),a1=a2!==a1,a1=cbb0000002893(a1),a1=cbb0000002894(a3),a2=cbb0000002895(a1),a3=delete a2[a1],a1=cbb0000002896(a3));}else{a1=cbb0000007087(a4);a2=cbb0000007088(a1);a1=a1>=a2;a2=cbb0000007089(a1);a1=cbb0000007090(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}}else 
if(s_cbb<oo2867){if(s_cbb<oo2868){s_cbb<oo2869?s_cbb<oo2870?s_cbb<oo2871?s_cbb<oo2872?(a1=cbb0000004676(a3),a2=cbb0000004677(a1),a2.push(a1),a1=cbb0000004678(a2),a5=cbb0000004679(a3),a4=cbb0000004680(!a5)):(a5=cbb0000003569(a2),a2=cbb0000003570(-a5),a1=cbb0000003571(a2),a2=cbb0000003572(a1),a1=a1/a2,a3=cbb0000003573(a1)):s_cbb<oo2873?(a1=cbb00000034({}),a1=cbb00000035(a3),a2=cbb00000036(a1),a1=a1 in a2,a1=cbb00000037(a1)):(a1=cbb0000007191(a3),a2=cbb0000007192(a4),a1=a1^a2,a5=cbb0000007193(a1),a1=cbb0000007194(a3),a2=cbb0000007195(a1),a1=a1<a2,a1=cbb0000007196(a1)):s_cbb<oo2874?s_cbb<oo2875?(a1=cbb0000007681(a2),a2=cbb0000007682(a1),a1=a2-a1,a1=cbb0000007683(a1),a1=cbb0000007684(a2),a2=cbb0000007685(a1),a1=a1+a2,a1=cbb0000007686(a1)):(a1=cbb0000003809(a2),a2=cbb0000003810(a1),a1=a2-a1,a1=cbb0000003811(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003812()):a4.splice(oo4806,oo4806,cbb0000003813());}a1=cbb0000003814(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):(a1=cbb0000007558(a2),a2=cbb0000007559(a1),a1=a2==a1,a4=cbb0000007560(a1),a1=shuz[start++],a2=cbb0000007561(a3),a3=cbb0000007562(a1),a2[constantPool[a1]]=a3):s_cbb<oo2885?s_cbb<oo2886?s_cbb<oo2887?(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004197(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004198(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004199(a4);}(),a5=cbb0000004200(a3),a4=cbb0000004201(!a5)):(a1=cbb0000002400(a2),a2=cbb0000002401(a3),a1=a1&a2,a3=cbb0000002402(a1),a5=cbb0000002403(a3),a4=cbb0000002404(!a5)):s_cbb<oo2895?(a1=cbb000000411(a2),a2=cbb000000412(a1),a1=a1/a2,a3=cbb000000413(a1),a1=cbb000000414(a2),a2=cbb000000415(a1),a1=a2-a1,a1=cbb000000416(a1)):(a5=cbb0000003766(a3),a1=cbb0000003767(typeof 
a5),a1=cbb0000003768(a6),a2=cbb0000003769(a7),a1=a1|a2,a8=cbb0000003770(a1)):s_cbb<oo2896?s_cbb<oo2897?(a2=cbb0000001144([]),j=cbb0000001145(a1),j2=cbb0000001146(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):(a1=cbb0000002213(a3),a2=cbb0000002214(a4),a3=cbb0000002215(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002216(),a2=cbb0000002217(),a1=a2!=a1,cbb0000002218(a1)):(a1=cbb0000007332(a5),a2=cbb0000007333(a6),a1=a2*a1,a7=cbb0000007334(a1),all=cbbb,a1=cbb0000007335(cbbb));}else if(s_cbb<oo2898){if(s_cbb<oo2899){if(s_cbb<oo2900){s_cbb<oo2901?(a2=cbb0000001501(allthis),a1=shuz[start++],start+=a1):(a1=cbb0000007858(a4),a2=cbb0000007859(a5),a1=a1-a2,a6=cbb0000007860(a1),a1=cbb0000007861(a2),a2=cbb0000007862(a1),a1=a2-a1,a1=cbb0000007863(a1));}else if(s_cbb<oo2902){a1=cbb000000802(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000803(a1)):a9=oo4829;;a1=cbb000000804(a6);a2=cbb000000805(a7);a1=a1|a2;a8=cbb000000806(a1);}else{a5=cbb0000007723(a3);a4=cbb0000007724(!a5);a1=cbb0000007725(a2);a2=cbb0000007726(a1);a1=a1+a2;a1=cbb0000007727(a1);}}else s_cbb<oo2904?s_cbb<oo2905?(a1=cbb0000005812(a5),a2=cbb0000005813(a9),a1=a2<a1,a6=cbb0000005814(a1),a1=cbb0000005815(a3),a2=cbb0000005816(a4),a1=a1>>a2,a5=cbb0000005817(a1)):(a1=cbb0000003219(a2),a2=cbb0000003220(a1),a1=a2==a1,a4=cbb0000003221(a1),a1=cbb0000003222(a2),a2=cbb0000003223(a1),a1=a2===a1,a3=cbb0000003224(a1)):(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004131(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004132(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004133(a4);}(),a1=cbb0000004134(a3),a2=cbb0000004135(a1),a1=a1<a2,a1=cbb0000004136(a1));}else 
if(s_cbb<oo2913){s_cbb<oo2914?s_cbb<oo2915?(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008294(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008295(a5),a2=cbb0000008296(a6),a1=a2!==a1,a1=cbb0000008297(a1)):(a1=cbb0000003033(a6),a2=cbb0000003034(a7),a1=a1|a2,a8=cbb0000003035(a1),a1=cbb0000003036(a5),a2=cbb0000003037(a6),a1=a2!==a1,a1=cbb0000003038(a1)):s_cbb<oo2917?(a1=cbb0000005498(a2),a2=cbb0000005499(a3),a1=a1&a2,a3=cbb0000005500(a1),a1=cbb0000005501(a5),a2=cbb0000005502(a9),a1=a2<a1,a6=cbb0000005503(a1)):(a1=cbb0000002652(a3),a2=cbb0000002653(a4),a1=a1^a2,a5=cbb0000002654(a1),a5=cbb0000002655(a1),a2=cbb0000002656(void a5));}else if(s_cbb<oo2918){if(s_cbb<oo2919){a1=cbb0000003440(a3);a2=cbb0000003441(a1);a1=a1<a2;a1=cbb0000003442(a1);a1=cbb0000003443(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003444(a1)):a9=oo4829;;}else{a1=cbb0000001056(a2);a2=cbb0000001057(a1);a1=a2-a1;a1=cbb0000001058(a1);return;}}else{a1=cbb000000236();a2=cbb000000237();a1=a2!=a1;cbb000000238(a1);a1=cbb000000239(a4);a2=cbb000000240(a1);a1=a1>=a2;a2=cbb000000241(a1);}}else if(s_cbb<oo2921){if(s_cbb<oo2922){if(s_cbb<oo2923){if(s_cbb<oo2924){if(s_cbb<oo2925){a1=cbb000000721(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000722(a1)):a9=oo4829;;a2=cbb000000723(allthis);}else{a2=cbb0000001944([]);a1=cbb0000001945(a2);a2=cbb0000001946(a1);a1=a2-a1;a1=cbb0000001947(a1);}}else s_cbb<oo2927?(a1=cbb0000006644(a5),a2=cbb0000006645(a6),a1=a2!==a1,a1=cbb0000006646(a1),a1=cbb0000006647(a3),a2=cbb0000006648(a4),a1=a1%a2,a5=cbb0000006649(a1)):(a1=cbb0000002992(a5),a2=cbb0000002993(a6),a1=a2!==a1,a1=cbb0000002994(a1),a1=cbb0000002995(),a2=cbb0000002996(),a1=a2!=a1,cbb0000002997(a1));}else s_cbb<oo2928?s_cbb<oo2929?(a2=cbb0000002056([]),a1=cbb0000002057({})):(a2=cbb0000005976(allthis),a1=cbb0000005977(a6),a2=cbb0000005978(a7),a1=a1|a2,a8=cbb0000005979(a1)):(a1=cbb0000004552(a3),a2=cbb0000004553(a3),a1=a1>a2,a2=cbb0000004554(a1),a5=cbb0000004555(a3),a1=cbb0000004556(~a5));}else 
if(s_cbb<oo2930){if(s_cbb<oo2931){s_cbb<oo2932?(a1=cbb000000272(),a2=cbb000000273(),a1=a2!=a1,cbb000000274(a1),a1=cbb000000275(a3),a2=cbb000000276(a3),a1=a1>a2,a2=cbb000000277(a1)):(a1=cbb0000005942(a6),a2=cbb0000005943(a7),a1=a1|a2,a8=cbb0000005944(a1),a1=cbb0000005945({}));}else if(s_cbb<oo2933){a1=cbb0000007145(a3);a2=cbb0000007146(a1);a1=a1<a2;a1=cbb0000007147(a1);a1=cbb0000007148(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}else{a1=cbb0000008018(a2);a2=cbb0000008019(a1);a1=a1+a2;a1=cbb0000008020(a1);a1=shuz[start++];a2=cbb0000008021(constantPool[a1]);}}else s_cbb<oo2935?s_cbb<oo2936?(a1=cbb0000003134(a5),a2=cbb0000003135(a6),a1=a2*a1,a7=cbb0000003136(a1),a1=cbb0000003137(a2),a2=cbb0000003138(a1),a1=a2===a1,a3=cbb0000003139(a1)):(a1=cbb0000007925(a3),a2=cbb0000007926(a4),a1=a1^a2,a5=cbb0000007927(a1),a1=shuz[start++],a2=cbb0000007928(a1)):(a1=cbb0000002680(a5),a2=cbb0000002681(a6),a1=a2!==a1,a1=cbb0000002682(a1),a5=cbb0000002683(a1),a2=cbb0000002684(void a5));}else s_cbb<oo2937?s_cbb<oo2938?s_cbb<oo2939?s_cbb<oo2940?(a1=cbb0000002092({}),a1=cbb0000002093(a3),a2=cbb0000002094(a4),a1=a1^a2,a5=cbb0000002095(a1)):(a1=cbb0000006284(a2),a2=cbb0000006285(a1),a1=a1/a2,a3=cbb0000006286(a1),a1=cbb0000006287(a3),a2=cbb0000006288(a1),a1=a1<a2,a1=cbb0000006289(a1)):s_cbb<oo2941?(a1=cbb0000004695(a2),a2=cbb0000004696(a1),a1=a2-a1,a1=cbb0000004697(a1),a1=cbb0000004698(a3),a2=cbb0000004699(a1),a2.push(a1),a1=cbb0000004700(a2)):(a5=cbb0000003754(a3),a1=cbb0000003755(typeof a5),a1=cbb0000003756(a3),a2=cbb0000003757(a4),a1=a1>>a2,a5=cbb0000003758(a1)):s_cbb<oo2942?s_cbb<oo2943?(a1=cbb0000005789(a2),a2=cbb0000005790(a1),a1=a2==a1,a4=cbb0000005791(a1),a1=cbb0000005792(a3),a2=cbb0000005793(a4),a1=a1>>a2,a5=cbb0000005794(a1)):(a5=cbb0000008186(a3),a4=cbb0000008187(!a5),a1=shuz[start++],a2=shuz[start++],a1=new 
RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008188(a1)):(a1=cbb0000004095(a6),a2=cbb0000004096(a1),a1=a1>>>a2,a2=cbb0000004097(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004098(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004099(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004100(a4);}()):s_cbb<oo2951?s_cbb<oo2952?s_cbb<oo2953?(a1=cbb0000004681(a3),a2=cbb0000004682(a1),a2.push(a1),a1=cbb0000004683(a2),a2=cbb0000004684([])):(a1=cbb0000004320(a2),a2=cbb0000004321(a3),a3=cbb0000004322(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004323(a3),a1=cbb0000004324(a3),a2=cbb0000004325(a2),a1=a1<<a2,a1=cbb0000004326(a1)):(a1=cbb0000006378(a3),a2=cbb0000006379(a2),a1=a1<<a2,a1=cbb0000006380(a1),a1=cbb0000006381(a2),a2=cbb0000006382(a1),a1=a1/a2,a3=cbb0000006383(a1)):s_cbb<oo2954?s_cbb<oo2955?(a1=cbb0000002949(a2),a2=cbb0000002950(a1),a1=a2===a1,a3=cbb0000002951(a1),a1=cbb0000002952(a5),a2=cbb0000002953(a6),a1=a2!==a1,a1=cbb0000002954(a1)):(a1=cbb0000006501(a2),a2=cbb0000006502(a3),a1=a1&a2,a3=cbb0000006503(a1),a1=cbb0000006504(a3),a2=cbb0000006505(a4),a1=a1^a2,a5=cbb0000006506(a1)):(a1=cbb0000002331(a3),a2=cbb0000002332(a1),a1=a1<a2,a1=cbb0000002333(a1),a5=cbb0000002334(a3),a4=cbb0000002335(!a5));}else if(s_cbb<oo2956){if(s_cbb<oo2957){if(s_cbb<oo2958){if(s_cbb<oo2959){if(s_cbb<oo2960){if(s_cbb<oo2961){s_cbb<oo2962?s_cbb<oo2963?(a1=cbb0000004453(a2),a2=cbb0000004454(a3),a3=cbb0000004455(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004456(a3),a1=cbb0000004457(a4),a2=cbb0000004458(a5),a1=a1-a2,a6=cbb0000004459(a1)):(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004238(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004239(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new 
a3(...args);offnew=oo4806;a1=cbb0000004240(a4);}(),a1=cbb0000004241({})):s_cbb<oo2971?(a1=cbb0000004539(a3),a2=cbb0000004540(a3),a1=a1>a2,a2=cbb0000004541(a1),a5=cbb0000004542(a3),a1=cbb0000004543(~a5)):(a1=cbb0000007657(a2),a2=cbb0000007658(a1),a1=a1+a2,a1=cbb0000007659(a1),a1=cbb0000007660(a3),a2=cbb0000007661(a2),a1=a1<<a2,a1=cbb0000007662(a1));}else if(s_cbb<oo2972){s_cbb<oo2973?(a1=cbb00000078(a3),a2=cbb00000079(a1),a1=a1 in a2,a1=cbb00000080(a1),a1=cbb00000081(a5),a2=cbb00000082(a6),a1=a2!==a1,a1=cbb00000083(a1)):(a2=cbb0000001111(allthis),j=cbb0000001112(a1),j2=cbb0000001113(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1);}else{a2=cbb0000008365([]);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008366(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else 
if(s_cbb<oo2985){s_cbb<oo2986?s_cbb<oo2987?(a1=cbb0000004294(a2),a2=cbb0000004295(a3),a3=cbb0000004296(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004297(a3),a1=cbb0000004298(a5),a2=cbb0000004299(a6),a1=a2!==a1,a1=cbb0000004300(a1)):(a1=shuz[start++],a2=cbb0000007563(a3),a3=cbb0000007564(a1),a2[constantPool[a1]]=a3,a1=cbb0000007565(a6),a2=cbb0000007566(a7),a1=a1|a2,a8=cbb0000007567(a1)):s_cbb<oo2988?(a1=cbb0000007299(a3),a2=cbb0000007300(a1),a1=a1<a2,a1=cbb0000007301(a1),a5=cbb0000007302(a1),a2=cbb0000007303(void a5)):(a1=cbb0000004665(a3),a2=cbb0000004666(a1),a2.push(a1),a1=cbb0000004667(a2),a5=cbb0000004668(a1),a2=cbb0000004669(void a5));}else if(s_cbb<oo2989){s_cbb<oo2990?(a1=cbb0000003080({}),a1=cbb0000003081(a5),a2=cbb0000003082(a6),a1=a2!==a1,a1=cbb0000003083(a1)):(a1=cbb0000007630(a2),a2=cbb0000007631(a1),a1=a1+a2,a1=cbb0000007632(a1),function(){debugger;}());}else{a1=cbb000000739(a6);a2=cbb000000740(a7);a1=a1|a2;a8=cbb000000741(a1);a1=cbb000000742(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000743(a1)):a9=oo4829;;}}else if(s_cbb<oo2992){s_cbb<oo2993?s_cbb<oo2994?s_cbb<oo2995?(a1=cbb0000005432(a6),a2=cbb0000005433(a7),a1=a1|a2,a8=cbb0000005434(a1),a1=cbb0000005435(a2),a2=cbb0000005436(a3),a1=a1&a2,a3=cbb0000005437(a1)):(all=cbbb,a1=cbb0000007406(cbbb),a1=cbb0000007407(a2),a2=cbb0000007408(a1),a1=a1+a2,a1=cbb0000007409(a1)):s_cbb<oo2996?(a1=cbb0000005243(a4),a2=cbb0000005244(a1),a1=a1>=a2,a2=cbb0000005245(a1),a1=cbb0000005246(a2),a2=cbb0000005247(a1),a1=a1/a2,a3=cbb0000005248(a1)):(a2=cbb0000003054([]),a1=cbb0000003055(a5),a2=cbb0000003056(a6),a1=a2!==a1,a1=cbb0000003057(a1)):s_cbb<oo2997?s_cbb<oo2998?(a1=cbb0000002615(a6),a2=cbb0000002616(a1),a1=a1>>>a2,a2=cbb0000002617(a1),function(){i=[];a2=cbb0000002618(a2);for(a1 in 
a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}()):(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004149(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004150(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004151(a4);}(),a1=cbb0000004152(a6),a2=cbb0000004153(a1),a1=a1>>>a2,a2=cbb0000004154(a1)):(a1=cbb0000005749(a5),a2=cbb0000005750(a9),a1=a2<a1,a6=cbb0000005751(a1),a1=cbb0000005752(a6),a2=cbb0000005753(a1),a1=a1>>>a2,a2=cbb0000005754(a1));}else if(s_cbb<oo3006){if(s_cbb<oo3007){s_cbb<oo3008?(function(){a1=cbb0000002433(a2);throw a1;}(),a1=cbb0000002434(a4),a2=cbb0000002435(a1),a1=a1>=a2,a2=cbb0000002436(a1)):(a5=cbb0000003654(a2),a2=cbb0000003655(-a5),a1=cbb0000003656(a6),a2=cbb0000003657(a7),a1=a1|a2,a8=cbb0000003658(a1));}else if(s_cbb<oo3009){all=cbbb;a1=cbb0000007367(cbbb);a1=cbb0000007368(a5);a2=cbb0000007369(a9);a1=a2<a1;a6=cbb0000007370(a1);}else{(function(){debugger;})();a1=cbb000000680(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000681(a1)):a9=oo4829;;}}else s_cbb<oo3011?s_cbb<oo3012?(a5=cbb0000002690(a1),a2=cbb0000002691(void a5),a1=cbb0000002692(cbbb),a2=cbb0000002693(a1),a1=a1<=a2,a5=cbb0000002694(a1)):(a1=cbb0000005670(a3),a2=cbb0000005671(a4),a1=a1>>a2,a5=cbb0000005672(a1),a1=cbb0000005673(a6),a2=cbb0000005674(a1),a1=a1>>>a2,a2=cbb0000005675(a1)):(a1=cbb0000001373(a2),a2=cbb0000001374(a1),a1=a1/a2,a3=cbb0000001375(a1),a1=cbb0000001376(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820);}else if(s_cbb<oo3014){if(s_cbb<oo3015){if(s_cbb<oo3016){s_cbb<oo3017?s_cbb<oo3018?(function(){a1=cbb0000002496(a2);throw a1;}(),function(){debugger;}()):(a1=cbb0000006973(),a2=cbb0000006974(),a1=a2!=a1,cbb0000006975(a1),a1=cbb0000006976(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828):s_cbb<oo3020?(a5=cbb000000184(a1),a2=cbb000000185(void 
a5),a1=cbb000000186(),a2=cbb000000187(),a1=a2!=a1,cbb000000188(a1)):(a1=cbb0000004641(a3),a2=cbb0000004642(a1),a2.push(a1),a1=cbb0000004643(a2),a1=cbb0000004644(cbbb),a2=cbb0000004645(a1),a1=a1<=a2,a5=cbb0000004646(a1));}else if(s_cbb<oo3021){if(s_cbb<oo3022){a1=cbb0000003425(a3);a2=cbb0000003426(a3);a1=a1>a2;a2=cbb0000003427(a1);a1=cbb0000003428(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003429(a1)):a9=oo4829;;}else{a1=cbb0000006858(a5);a2=cbb0000006859(a6);a1=a2*a1;a7=cbb0000006860(a1);a1=cbb0000006861(a3);a2=cbb0000006862(a3);a1=a1>a2;a2=cbb0000006863(a1);}}else{(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004161(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004162(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004163(a4);})();a1=cbb0000004164(a3);a2=cbb0000004165(a3);a1=a1>a2;a2=cbb0000004166(a1);}}else if(s_cbb<oo3031){s_cbb<oo3032?s_cbb<oo3033?(function(){debugger;}(),a1=cbb000000507(a2),a2=cbb000000508(a1),a1=a2-a1,a1=cbb000000509(a1)):(a1=shuz[start++],a2=cbb0000007998(a1),a1=cbb0000007999(a3),a2=cbb0000008000(a1),a1=a1<a2,a1=cbb0000008001(a1)):s_cbb<oo3034?(a1=cbb0000008325(a3),a2=cbb0000008326(a2),a1=a1<<a2,a1=cbb0000008327(a1),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008328(a2);cbbb[a9]=argsList[a2];}}()):(a1=cbb0000007320(a3),a2=cbb0000007321(a1),a1=a1<a2,a1=cbb0000007322(a1),a1=cbb0000007323(a4),a2=cbb0000007324(a5),a1=a1-a2,a6=cbb0000007325(a1));}else if(s_cbb<oo3036){if(s_cbb<oo3037){a1=cbb0000008403(a5);a2=cbb0000008404(a6);a1=a2!==a1;a1=cbb0000008405(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008406(a1);let g=a7;all[g]=function(){let g2=new 
cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}else{a1=cbb0000006360(a2);a2=cbb0000006361(a1);a1=a1/a2;a3=cbb0000006362(a1);a1=cbb0000006363(a4);a2=cbb0000006364(a5);a1=a1-a2;a6=cbb0000006365(a1);}}else{a5=cbb000000992(a1);a2=cbb000000993(void a5);return;}}else if(s_cbb<oo3049){s_cbb<oo3050?s_cbb<oo3051?s_cbb<oo3052?(function(){debugger;}(),function(){debugger;}()):(function(){debugger;}(),a1=cbb0000004768(a2),a2=cbb0000004769(a1),a1=a2==a1,a4=cbb0000004770(a1)):s_cbb<oo3053?(a5=cbb0000003718(a3),a1=cbb0000003719(typeof a5),a1=cbb0000003720(a3),a2=cbb0000003721(a1),a1=a1<a2,a1=cbb0000003722(a1)):(function(){debugger;}(),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004107(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004108(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new 
a3(...args);offnew=oo4806;a1=cbb0000004109(a4);}()):s_cbb<oo3061?s_cbb<oo3062?(a1=cbb0000003201(a2),a2=cbb0000003202(a1),a1=a2===a1,a3=cbb0000003203(a1),a1=cbb0000003204(a6),a2=cbb0000003205(a1),a1=a1>>>a2,a2=cbb0000003206(a1)):(a1=cbb0000006971(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a2=cbb0000006972(allthis)):(a1=cbb0000005777(),a2=cbb0000005778(),a1=a2!=a1,cbb0000005779(a1),a1=cbb0000005780(a3),a2=cbb0000005781(a4),a1=a1>>a2,a5=cbb0000005782(a1));}else if(s_cbb<oo3064){s_cbb<oo3065?s_cbb<oo3066?(a1=cbb0000007009(a6),a2=cbb0000007010(a1),a1=a1>>>a2,a2=cbb0000007011(a1),a1=cbb0000007012(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828):(a1=cbb0000006991(a3),a2=cbb0000006992(a3),a1=a1>a2,a2=cbb0000006993(a1),a1=cbb0000006994(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828):s_cbb<oo3069?(a1=cbb000000939(a5),a2=cbb000000940(a9),a1=a2<a1,a6=cbb000000941(a1),a1=cbb000000942(),a2=cbb000000943(),a1=a2!=a1,cbb000000944(a1)):(a1=cbb0000002551(a6),a2=cbb0000002552(a7),a1=a1|a2,a8=cbb0000002553(a1),function(){i=[];a2=cbb0000002554(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}());}else if(s_cbb<oo3070){if(s_cbb<oo3071){a1=cbb0000001313(a3);a2=cbb0000001314(a4);a1=a1^a2;a5=cbb0000001315(a1);(function(){debugger;})();}else{a1=cbb0000007141(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a1=cbb0000007142(a5);a2=cbb0000007143(a6);a1=a2!==a1;a1=cbb0000007144(a1);}}else{(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008229(a2);cbbb[a9]=argsList[a2];}})();a1=cbb0000008230(a3);a2=cbb0000008231(a3);a1=a1>a2;a2=cbb0000008232(a1);}}else 
if(s_cbb<oo3074){if(s_cbb<oo3075){if(s_cbb<oo3076){s_cbb<oo3077?s_cbb<oo3078?s_cbb<oo3079?(a1=cbb0000003335(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a1=cbb0000003336(a4),a2=cbb0000003337(a5),a1=a1-a2,a6=cbb0000003338(a1)):(a1=shuz[start++],a2=cbb0000007492(a3),a3=cbb0000007493(a1),a2[constantPool[a1]]=a3,a1=cbb0000007494(a2),a2=cbb0000007495(a1),a1=a1/a2,a3=cbb0000007496(a1)):s_cbb<oo3081?(a1=cbb0000005361(a2),a2=cbb0000005362(a1),a1=a1+a2,a1=cbb0000005363(a1),a1=cbb0000005364(cbbb),a2=cbb0000005365(a1),a1=a1<=a2,a5=cbb0000005366(a1)):(a1=cbb0000005537(a6),a2=cbb0000005538(a7),a1=a1|a2,a8=cbb0000005539(a1),a1=cbb0000005540(a2),a2=cbb0000005541(a3),a1=a1&a2,a3=cbb0000005542(a1)):s_cbb<oo3082?s_cbb<oo3083?(a1=cbb0000004865(a2),a2=cbb0000004866(a1),a1=a2==a1,a4=cbb0000004867(a1),a1=cbb0000004868(a6),a2=cbb0000004869(a1),a1=a1>>>a2,a2=cbb0000004870(a1)):(a1=cbb000000111(a5),a2=cbb000000112(a9),a1=a2<a1,a6=cbb000000113(a1),a1=cbb000000114(a3),a2=cbb000000115(a1),a1=a1 in a2,a1=cbb000000116(a1)):(a1=cbb0000004647(a6),a2=cbb0000004648(a1),a1=a1>>>a2,a2=cbb0000004649(a1),a1=cbb0000004650(a3),a2=cbb0000004651(a1),a2.push(a1),a1=cbb0000004652(a2));}else if(s_cbb<oo3084){if(s_cbb<oo3085){if(s_cbb<oo3086){a1=cbb0000003122(a5);a2=cbb0000003123(a6);a1=a2*a1;a7=cbb0000003124(a1);a1=cbb0000003125(a2);a2=cbb0000003126(a1);a1=a2===a1;a3=cbb0000003127(a1);}else{a1=cbb0000001062({});return;}}else s_cbb<oo3087?(a1=cbb0000006765(a6),a2=cbb0000006766(a7),a1=a1|a2,a8=cbb0000006767(a1),a1=cbb0000006768(a3),a2=cbb0000006769(a4),a1=a1%a2,a5=cbb0000006770(a1)):(a1=cbb0000007917(a2),a2=cbb0000007918(a1),a1=a1+a2,a1=cbb0000007919(a1),a1=shuz[start++],a2=cbb0000007920(a1));}else 
s_cbb<oo3088?s_cbb<oo3089?(a1=cbb0000006302(a2),a2=cbb0000006303(a1),a1=a1/a2,a3=cbb0000006304(a1),a1=cbb0000006305(),a2=cbb0000006306(),a1=a2!=a1,cbb0000006307(a1)):(a1=cbb0000005515(a2),a2=cbb0000005516(a3),a1=a1&a2,a3=cbb0000005517(a1),a1=cbb0000005518(a4),a2=cbb0000005519(a1),a1=a1>=a2,a2=cbb0000005520(a1)):(all=cbbb,a1=cbb0000007423(cbbb),a5=cbb0000007424(a3),a4=cbb0000007425(!a5));}else s_cbb<oo3090?s_cbb<oo3091?s_cbb<oo3092?s_cbb<oo3093?(a1=cbb0000006680(a3),a2=cbb0000006681(a4),a1=a1%a2,a5=cbb0000006682(a1),a1=cbb0000006683(a3),a2=cbb0000006684(a4),a1=a1%a2,a5=cbb0000006685(a1)):(a1=cbb0000005643(a6),a2=cbb0000005644(a1),a1=a1>>>a2,a2=cbb0000005645(a1),a1=cbb0000005646(a6),a2=cbb0000005647(a7),a1=a1|a2,a8=cbb0000005648(a1)):s_cbb<oo3094?(a1=cbb0000004755(a3),a2=cbb0000004756(a1),a2.push(a1),a1=cbb0000004757(a2),a2=cbb0000004758([])):(a1=cbb0000005946(a6),a2=cbb0000005947(a7),a1=a1|a2,a8=cbb0000005948(a1),a1=cbb0000005949(a3),a2=cbb0000005950(a4),a1=a1>>a2,a5=cbb0000005951(a1)):s_cbb<oo3095?s_cbb<oo3096?(a1=cbb0000007263(a3),a2=cbb0000007264(a4),a1=a1^a2,a5=cbb0000007265(a1),a1=cbb0000007266(a3),a2=cbb0000007267(a1),a1=a1<a2,a1=cbb0000007268(a1)):(a1=cbb0000005658(a3),a2=cbb0000005659(a4),a1=a1%a2,a5=cbb0000005660(a1),a1=cbb0000005661(a6),a2=cbb0000005662(a1),a1=a1>>>a2,a2=cbb0000005663(a1)):(a1=cbb000000652(a5),a2=cbb000000653(a3),a3=cbb000000654(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a5=cbb000000655(a1),a2=cbb000000656(void 
a5)):s_cbb<oo3097?s_cbb<oo3098?s_cbb<oo3099?(a1=cbb0000005181(a4),a2=cbb0000005182(a1),a1=a1>=a2,a2=cbb0000005183(a1),a1=cbb0000005184(a6),a2=cbb0000005185(a7),a1=a1|a2,a8=cbb0000005186(a1)):(a2=cbb0000002188(allthis),a1=cbb0000002189(a3),a2=cbb0000002190(a4),a3=cbb0000002191(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):s_cbb<oo3100?(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003979()):a4.splice(oo4806,oo4806,cbb0000003980());}a1=cbb0000003981(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a1=cbb0000003982(a2),a2=cbb0000003983(a1),a1=a1+a2,a1=cbb0000003984(a1)):(a1=cbb0000007728(a2),a2=cbb0000007729(a1),a1=a1/a2,a3=cbb0000007730(a1),a1=cbb0000007731(a4),a2=cbb0000007732(a5),a1=a1-a2,a6=cbb0000007733(a1)):s_cbb<oo3110?s_cbb<oo3111?(a1=cbb0000001307(a5),a2=cbb0000001308(a6),a1=a2*a1,a7=cbb0000001309(a1),function(){debugger;}()):(a1=cbb0000005476(cbbb),a2=cbb0000005477(a1),a1=a1<=a2,a5=cbb0000005478(a1),a1=cbb0000005479(a2),a2=cbb0000005480(a3),a1=a1&a2,a3=cbb0000005481(a1)):(function(){debugger;}(),a1=shuz[start++],start+=a1);}else if(s_cbb<oo3112){if(s_cbb<oo3113){s_cbb<oo3114?s_cbb<oo3115?s_cbb<oo3116?(a1=cbb0000003713(a2),a2=cbb0000003714(a1),a1=a1+a2,a1=cbb0000003715(a1),a5=cbb0000003716(a3),a1=cbb0000003717(typeof a5)):(a1=cbb0000005579(a2),a2=cbb0000005580(a3),a1=a1&a2,a3=cbb0000005581(a1),a1=cbb0000005582(),a2=cbb0000005583(),a1=a2!=a1,cbb0000005584(a1)):s_cbb<oo3117?(a1=cbb0000003090(a5),a2=cbb0000003091(a6),a1=a2!==a1,a1=cbb0000003092(a1),a1=cbb0000003093(a2),a2=cbb0000003094(a3),a1=a1&a2,a3=cbb0000003095(a1)):(a1=shuz[start++],a2=cbb0000007939(a1),a2=cbb0000007940([])):s_cbb<oo3118?s_cbb<oo3119?(a5=cbb0000003004(a1),a2=cbb0000003005(void 
a5),a1=cbb0000003006(a5),a2=cbb0000003007(a6),a1=a2!==a1,a1=cbb0000003008(a1)):(a1=cbb0000002028({}),a1=cbb0000002029(a5),a2=cbb0000002030(a6),a1=a2*a1,a7=cbb0000002031(a1)):(a1=cbb000000561(a2),a2=cbb000000562(a3),a1=a1&a2,a3=cbb000000563(a1),a1=cbb000000564(a5),a2=cbb000000565(a3),a3=cbb000000566(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1);}else if(s_cbb<oo3120){if(s_cbb<oo3121){s_cbb<oo3122?(a1=cbb0000004308(a2),a2=cbb0000004309(a3),a3=cbb0000004310(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004311(a3),a5=cbb0000004312(a3),a4=cbb0000004313(!a5)):(a1=cbb0000007790(a2),a2=cbb0000007791(a1),a1=a1+a2,a1=cbb0000007792(a1),a1=cbb0000007793(a4),a2=cbb0000007794(a5),a1=a1-a2,a6=cbb0000007795(a1));}else if(s_cbb<oo3123){a1=cbb0000002048({});a1=cbb0000002049(a5);a2=cbb0000002050(a6);a1=a2!==a1;a1=cbb0000002051(a1);}else{a1=cbb0000008357(a2);a2=cbb0000008358(a1);a1=a2-a1;a1=cbb0000008359(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008360(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else 
s_cbb<oo3135?s_cbb<oo3136?(a1=cbb000000620(a2),a2=cbb000000621(a1),a1=a2-a1,a1=cbb000000622(a1),a1=cbb000000623(a5),a2=cbb000000624(a3),a3=cbb000000625(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(a1=cbb0000003931(a3),a2=cbb0000003932(a2),a1=a1<<a2,a1=cbb0000003933(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003934()):a4.splice(oo4806,oo4806,cbb0000003935());}a1=cbb0000003936(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):(a1=cbb0000006816(a2),a2=cbb0000006817(a1),a1=a2-a1,a1=cbb0000006818(a1),a1=cbb0000006819(a5),a2=cbb0000006820(a6),a1=a2*a1,a7=cbb0000006821(a1));}else s_cbb<oo3146?s_cbb<oo3147?s_cbb<oo3148?s_cbb<oo3149?(a1=cbb0000006178(a6),a2=cbb0000006179(a1),a1=a1>>>a2,a2=cbb0000006180(a1),a1=cbb0000006181(a3),a2=cbb0000006182(a2),a1=a1<<a2,a1=cbb0000006183(a1)):(a1=cbb0000002980(a5),a2=cbb0000002981(a6),a1=a2!==a1,a1=cbb0000002982(a1),a1=cbb0000002983(),a2=cbb0000002984(),a1=a2!=a1,cbb0000002985(a1)):s_cbb<oo3150?(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003919()):a4.splice(oo4806,oo4806,cbb0000003920());}a1=cbb0000003921(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a1=cbb0000003922(),a2=cbb0000003923(),a1=a2!=a1,cbb0000003924(a1)):(a5=cbb0000005698(a1),a2=cbb0000005699(void 
a5),a1=cbb0000005700(a6),a2=cbb0000005701(a1),a1=a1>>>a2,a2=cbb0000005702(a1)):s_cbb<oo3160?s_cbb<oo3161?(a1=cbb0000003363(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a1=cbb0000003364(a4),a2=cbb0000003365(a1),a1=a1>=a2,a2=cbb0000003366(a1)):(a1=cbb0000001551(a2),a2=cbb0000001552(a1),a1=a2-a1,a1=cbb0000001553(a1),function(){a1=cbb0000001554(a2);a2=cbb0000001555(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001556(a1);}()):(a1=cbb0000005298(a2),a2=cbb0000005299(a1),a1=a2-a1,a1=cbb0000005300(a1),a1=cbb0000005301(cbbb),a2=cbb0000005302(a1),a1=a1<=a2,a5=cbb0000005303(a1)):s_cbb<oo3163?s_cbb<oo3164?s_cbb<oo3165?(a1=cbb0000001411(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a5=cbb0000001412(a1),a2=cbb0000001413(void a5)):(a1=cbb0000004465(),a2=cbb0000004466(),a1=a2!=a1,cbb0000004467(a1),a5=cbb0000004468(a3),a1=cbb0000004469(~a5)):(a1=cbb0000007467({}),a1=shuz[start++],a2=cbb0000007468(a3),a3=cbb0000007469(a1),a2[constantPool[a1]]=a3):s_cbb<oo3167?s_cbb<oo3168?(a1=cbb0000004202(a4),a2=cbb0000004203(a1),a1=a1>=a2,a2=cbb0000004204(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004205(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004206(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004207(a4);}()):(a2=cbb0000007996(allthis),a1=shuz[start++],a2=cbb0000007997(a1)):(function(){debugger;}(),function(){debugger;}());}else if(s_cbb<oo3176){if(s_cbb<oo3177){if(s_cbb<oo3178){if(s_cbb<oo3179){if(s_cbb<oo3180){if(s_cbb<oo3181){s_cbb<oo3182?(function(){debugger;}(),a1=cbb0000002854(a3),a2=cbb0000002855(a1),a3=delete a2[a1],a1=cbb0000002856(a3)):(a1=cbb0000003027(a5),a2=cbb0000003028(a9),a1=a2<a1,a6=cbb0000003029(a1),a1=cbb0000003030(a5),a2=cbb0000003031(a6),a1=a2!==a1,a1=cbb0000003032(a1));}else 
if(s_cbb<oo3183){a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}a1=cbb0000001247(a3);a2=cbb0000001248(a4);a1=a1%a2;a5=cbb0000001249(a1);}else{a2=cbb0000007218([]);a1=cbb0000007219(a3);a2=cbb0000007220(a1);a1=a1<a2;a1=cbb0000007221(a1);}}else s_cbb<oo3190?s_cbb<oo3191?(a5=cbb0000003332(a1),a2=cbb0000003333(void a5),a1=cbb0000003334(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):(a1=cbb0000004446(cbbb),a2=cbb0000004447(a1),a1=a1<=a2,a5=cbb0000004448(a1),a1=cbb0000004449(a2),a2=cbb0000004450(a3),a3=cbb0000004451(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004452(a3)):(a1=cbb0000001948(a3),a2=cbb0000001949(a2),a1=a1<<a2,a1=cbb0000001950(a1),a2=cbb0000001951([]));}else if(s_cbb<oo3193){if(s_cbb<oo3194){s_cbb<oo3195?(a2=cbb0000004022(allthis),a1=cbb0000004023(a2),a2=cbb0000004024(a1),a1=a2===a1,a3=cbb0000004025(a1)):(a1=cbb0000002012({}),a1=cbb0000002013(a5),a2=cbb0000002014(a9),a1=a2<a1,a6=cbb0000002015(a1));}else if(s_cbb<oo3196){a1=cbb0000007412(a6);a2=cbb0000007413(a7);a1=a1|a2;a8=cbb0000007414(a1);all=cbbb;a1=cbb0000007415(cbbb);}else{a1=cbb0000001009(a2);a2=cbb0000001010(a3);a1=a1&a2;a3=cbb0000001011(a1);return;}}else 
if(s_cbb<oo3197){s_cbb<oo3198?(a1=cbb0000006576(a2),a2=cbb0000006577(a1),a1=a2===a1,a3=cbb0000006578(a1),a1=cbb0000006579(a3),a2=cbb0000006580(a4),a1=a1^a2,a5=cbb0000006581(a1)):(a1=cbb0000004280(a2),a2=cbb0000004281(a3),a3=cbb0000004282(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004283(a3),a1=cbb0000004284(a2),a2=cbb0000004285(a1),a1=a2===a1,a3=cbb0000004286(a1));}else{a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}a1=cbb0000001224(a6);a2=cbb0000001225(a1);a1=a1>>>a2;a2=cbb0000001226(a1);}}else 
s_cbb<oo3205?s_cbb<oo3206?s_cbb<oo3207?s_cbb<oo3208?(a1=cbb0000005266(cbbb),a2=cbb0000005267(a1),a1=a1<=a2,a5=cbb0000005268(a1),a1=cbb0000005269(a2),a2=cbb0000005270(a1),a1=a2===a1,a3=cbb0000005271(a1)):(j=cbb0000001197(a1),j2=cbb0000001198(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a2=cbb0000001199(allthis)):s_cbb<oo3209?(a5=cbb0000007796(a3),a4=cbb0000007797(!a5),a1=cbb0000007798(a4),a2=cbb0000007799(a5),a1=a1-a2,a6=cbb0000007800(a1)):(a1=cbb0000006771(a3),a2=cbb0000006772(a4),a1=a1%a2,a5=cbb0000006773(a1),a1=cbb0000006774(a4),a2=cbb0000006775(a1),a1=a1>=a2,a2=cbb0000006776(a1)):s_cbb<oo3210?s_cbb<oo3211?(a1=cbb0000003112(a2),a2=cbb0000003113(a1),a1=a2===a1,a3=cbb0000003114(a1),a1=cbb0000003115(a3),a2=cbb0000003116(a1),a1=a1<a2,a1=cbb0000003117(a1)):(a1=cbb0000007874(a4),a2=cbb0000007875(a5),a1=a1-a2,a6=cbb0000007876(a1),a1=cbb0000007877(a2),a2=cbb0000007878(a1),a1=a2-a1,a1=cbb0000007879(a1)):(a1=cbb0000003339(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a5=cbb0000003340(a3),a4=cbb0000003341(!a5)):s_cbb<oo3213?s_cbb<oo3214?s_cbb<oo3215?(a1=cbb0000006033(),a2=cbb0000006034(),a1=a2!=a1,cbb0000006035(a1),a1=cbb0000006036(a6),a2=cbb0000006037(a7),a1=a1|a2,a8=cbb0000006038(a1)):(a1=cbb0000001134(cbbb),a2=cbb0000001135(a1),a1=a1<=a2,a5=cbb0000001136(a1),j=cbb0000001137(a1),j2=cbb0000001138(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):s_cbb<oo3216?(a1=cbb000000975(a3),a2=cbb000000976(a1),a1=a1<a2,a1=cbb000000977(a1),a1=cbb000000978(a5),a2=cbb000000979(a9),a1=a2<a1,a6=cbb000000980(a1)):(a1=cbb000000227(a3),a2=cbb000000228(a1),a1=a1<a2,a1=cbb000000229(a1),a1=cbb000000230(),a2=cbb000000231(),a1=a2!=a1,cbb000000232(a1)):s_cbb<oo3217?s_cbb<oo3218?(a1=cbb0000006914(a5),a2=cbb0000006915(a6),a1=a2!==a1,a1=cbb0000006916(a1),a1=cbb0000006917(a5),a2=cbb0000006918(a6),a1=a2*a1,a7=cbb0000006919(a1)):(a1=shuz[start++],a2=cbb0000008039(constantPool[a1]),a2=cbb0000008040(allthis)):(a1=cbb000000207(),a2=cbb000000208(),a1=a2!=a1,cbb000000209(a1),function(
){debugger;}());}else if(s_cbb<oo3219){s_cbb<oo3220?s_cbb<oo3221?s_cbb<oo3222?s_cbb<oo3223?(a1=cbb0000008199(a4),a2=cbb0000008200(a5),a1=a1-a2,a6=cbb0000008201(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008202(a1)):(a1=cbb000000377(a2),a2=cbb000000378(a1),a1=a2-a1,a1=cbb000000379(a1),a1=cbb000000380(a6),a2=cbb000000381(a7),a1=a1|a2,a8=cbb000000382(a1)):s_cbb<oo3224?(a1=cbb0000002076(a3),a2=cbb0000002077(a4),a1=a1%a2,a5=cbb0000002078(a1),a1=cbb0000002079({})):(a5=cbb0000002341(a3),a4=cbb0000002342(!a5),a1=cbb0000002343(a2),a2=cbb0000002344(a1),a1=a2==a1,a4=cbb0000002345(a1)):s_cbb<oo3225?s_cbb<oo3226?(a1=cbb0000005231(a3),a2=cbb0000005232(a4),a1=a1>>a2,a5=cbb0000005233(a1),a1=cbb0000005234(a4),a2=cbb0000005235(a1),a1=a1>=a2,a2=cbb0000005236(a1)):(a1=cbb0000006196(a3),a2=cbb0000006197(a2),a1=a1<<a2,a1=cbb0000006198(a1),a1=cbb0000006199(a5),a2=cbb0000006200(a6),a1=a2*a1,a7=cbb0000006201(a1)):(a1=cbb0000005664(a6),a2=cbb0000005665(a1),a1=a1>>>a2,a2=cbb0000005666(a1),a1=cbb0000005667(a4),a2=cbb0000005668(a5),a1=a1-a2,a6=cbb0000005669(a1)):s_cbb<oo3227?s_cbb<oo3228?s_cbb<oo3229?(a1=cbb0000005958(a6),a2=cbb0000005959(a7),a1=a1|a2,a8=cbb0000005960(a1),a1=cbb0000005961(cbbb),a2=cbb0000005962(a1),a1=a1<=a2,a5=cbb0000005963(a1)):(a5=cbb0000003735(a3),a1=cbb0000003736(typeof a5),a5=cbb0000003737(a3),a4=cbb0000003738(!a5)):s_cbb<oo3230?(a1=cbb0000006238(a3),a2=cbb0000006239(a2),a1=a1<<a2,a1=cbb0000006240(a1),a5=cbb0000006241(a3),a4=cbb0000006242(!a5)):(function(){debugger;}(),a1=cbb0000007615(a2),a2=cbb0000007616(a1),a1=a1+a2,a1=cbb0000007617(a1)):s_cbb<oo3231?s_cbb<oo3232?(a5=cbb0000002284(a3),a4=cbb0000002285(!a5),a1=cbb0000002286(a4),a2=cbb0000002287(a1),a1=a1>=a2,a2=cbb0000002288(a1)):(a1=shuz[start++],start+=a1,a2=cbb0000001497(allthis)):(a1=cbb0000007161(a3),a2=cbb0000007162(a1),a1=a1<a2,a1=cbb0000007163(a1),a1=cbb0000007164(cbbb),a2=cbb0000007165(a1),a1=a1<=a2,a5=cbb0000007166(a1));}else 
if(s_cbb<oo3233){if(s_cbb<oo3234){if(s_cbb<oo3235){s_cbb<oo3236?(a1=cbb000000457(a5),a2=cbb000000458(a9),a1=a2<a1,a6=cbb000000459(a1),a1=cbb000000460(a2),a2=cbb000000461(a1),a1=a2-a1,a1=cbb000000462(a1)):(a1=cbb0000006615(a3),a2=cbb0000006616(a4),a1=a1%a2,a5=cbb0000006617(a1),a1=cbb0000006618(a4),a2=cbb0000006619(a1),a1=a1>=a2,a2=cbb0000006620(a1));}else if(s_cbb<oo3237){a1=cbb0000003609(a4);a2=cbb0000003610(a5);a1=a1-a2;a6=cbb0000003611(a1);a5=cbb0000003612(a2);a2=cbb0000003613(-a5);}else{(function(){debugger;})();a1=cbb0000003445(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003446(a1)):a9=oo4829;;}}else s_cbb<oo3239?s_cbb<oo3240?(a1=cbb0000006400(a2),a2=cbb0000006401(a1),a1=a1/a2,a3=cbb0000006402(a1),a1=cbb0000006403(a3),a2=cbb0000006404(a2),a1=a1<<a2,a1=cbb0000006405(a1)):(function(){a1=cbb0000001710(a2);a2=cbb0000001711(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001712(a1);}(),a1=cbb0000001713(a5),a2=cbb0000001714(a9),a1=a2<a1,a6=cbb0000001715(a1)):(a1=cbb0000002800(a3),a2=cbb0000002801(a1),a3=delete a2[a1],a1=cbb0000002802(a3),a1=cbb0000002803(a5),a2=cbb0000002804(a9),a1=a2<a1,a6=cbb0000002805(a1));}else 
s_cbb<oo3241?s_cbb<oo3242?s_cbb<oo3243?(a1=cbb0000001319(a3),a2=cbb0000001320(a3),a1=a1>a2,a2=cbb0000001321(a1),function(){debugger;}()):(a1=cbb0000005456(a2),a2=cbb0000005457(a3),a1=a1&a2,a3=cbb0000005458(a1),a1=cbb0000005459(a2),a2=cbb0000005460(a1),a1=a2==a1,a4=cbb0000005461(a1)):(a1=cbb0000002122(a3),a2=cbb0000002123(a4),a3=cbb0000002124(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002125(a2),a2=cbb0000002126(a1),a1=a2==a1,a4=cbb0000002127(a1)):s_cbb<oo3244?s_cbb<oo3245?(a2=cbb0000003992([]),a2=cbb0000003993(allthis)):(a1=cbb0000003304(a4),a2=cbb0000003305(a5),a1=a1-a2,a6=cbb0000003306(a1),a1=cbb0000003307(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):(a1=cbb000000646(a5),a2=cbb000000647(a3),a3=cbb000000648(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb000000649(a2),a2=cbb000000650(a1),a1=a2==a1,a4=cbb000000651(a1));}else if(s_cbb<oo3247){if(s_cbb<oo3248){s_cbb<oo3249?s_cbb<oo3250?s_cbb<oo3251?s_cbb<oo3252?(a2=cbb0000003382([]),a1=cbb0000003383(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):(a1=cbb0000005549(a2),a2=cbb0000005550(a3),a1=a1&a2,a3=cbb0000005551(a1),a1=cbb0000005552(a5),a2=cbb0000005553(a9),a1=a2<a1,a6=cbb0000005554(a1)):s_cbb<oo3254?(a1=cbb000000278(),a2=cbb000000279(),a1=a2!=a1,cbb000000280(a1),a1=cbb000000281(a5),a2=cbb000000282(a6),a1=a2*a1,a7=cbb000000283(a1)):(a1=cbb0000003207(a2),a2=cbb0000003208(a1),a1=a2===a1,a3=cbb0000003209(a1),a1=cbb0000003210(a3),a2=cbb0000003211(a3),a1=a1>a2,a2=cbb0000003212(a1)):s_cbb<oo3255?s_cbb<oo3256?(a1=shuz[start++],start+=a1,a5=cbb0000001533(a1),a2=cbb0000001534(void 
a5)):(a1=cbb0000003384(a4),a2=cbb0000003385(a1),a1=a1>=a2,a2=cbb0000003386(a1),a1=cbb0000003387(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):(a1=shuz[start++],a2=cbb0000007450(a3),a3=cbb0000007451(a1),a2[constantPool[a1]]=a3,a1=cbb0000007452(a3),a2=cbb0000007453(a4),a1=a1^a2,a5=cbb0000007454(a1)):s_cbb<oo3258?s_cbb<oo3259?s_cbb<oo3260?(a1=shuz[start++],start+=a1,a1=cbb0000001505(a3),a2=cbb0000001506(a4),a1=a1%a2,a5=cbb0000001507(a1)):(a2=cbb0000005715([]),a1=cbb0000005716(a6),a2=cbb0000005717(a1),a1=a1>>>a2,a2=cbb0000005718(a1)):s_cbb<oo3261?(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008258(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008259(a3),a2=cbb0000008260(a4),a1=a1>>a2,a5=cbb0000008261(a1)):(a1=cbb0000005870(a2),a2=cbb0000005871(a1),a1=a1/a2,a3=cbb0000005872(a1),a1=cbb0000005873(a3),a2=cbb0000005874(a4),a1=a1>>a2,a5=cbb0000005875(a1)):s_cbb<oo3263?s_cbb<oo3264?(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003907()):a4.splice(oo4806,oo4806,cbb0000003908());}a1=cbb0000003909(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a1=cbb0000003910(a2),a2=cbb0000003911(a1),a1=a2==a1,a4=cbb0000003912(a1)):(a1=cbb0000005420(a2),a2=cbb0000005421(a3),a1=a1&a2,a3=cbb0000005422(a1),a1=cbb0000005423(cbbb),a2=cbb0000005424(a1),a1=a1<=a2,a5=cbb0000005425(a1)):(all=cbbb,a1=cbb0000007353(cbbb),a1=cbb0000007354(a4),a2=cbb0000007355(a1),a1=a1>=a2,a2=cbb0000007356(a1));}else if(s_cbb<oo3274){s_cbb<oo3275?s_cbb<oo3276?s_cbb<oo3277?(a1=cbb0000002836(a3),a2=cbb0000002837(a1),a3=delete a2[a1],a1=cbb0000002838(a3),a1=cbb0000002839(a2),a2=cbb0000002840(a1),a1=a2===a1,a3=cbb0000002841(a1)):(a2=cbb0000001937([]),a5=cbb0000001938(a1),a2=cbb0000001939(void 
a5)):s_cbb<oo3278?(a1=cbb0000003359(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a1=cbb0000003360(a3),a2=cbb0000003361(a3),a1=a1>a2,a2=cbb0000003362(a1)):(a1=cbb0000003378(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a1=cbb0000003379(a3),a2=cbb0000003380(a3),a1=a1>a2,a2=cbb0000003381(a1)):s_cbb<oo3281?s_cbb<oo3282?(function(){a1=cbb0000002490(a2);throw a1;}(),a5=cbb0000002491(a3),a4=cbb0000002492(!a5)):(a1=cbb000000284({}),a1=cbb000000285(),a2=cbb000000286(),a1=a2!=a1,cbb000000287(a1)):(function(){debugger;}(),a1=cbb0000001360(a3),a2=cbb0000001361(a4),a1=a1^a2,a5=cbb0000001362(a1));}else if(s_cbb<oo3283){if(s_cbb<oo3284){if(s_cbb<oo3285){a1=cbb0000002219(a3);a2=cbb0000002220(a4);a1=a1>>a2;a5=cbb0000002221(a1);a1=cbb0000002222(a3);a2=cbb0000002223(a4);a3=cbb0000002224(a5);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;}else{a1=cbb0000003494(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003495(a1)):a9=oo4829;;a2=cbb0000003496([]);}}else s_cbb<oo3287?(a1=cbb0000007018(a2),a2=cbb0000007019(a1),a1=a1+a2,a1=cbb0000007020(a1),a1=cbb0000007021(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828):(a1=cbb00000072(a3),a2=cbb00000073(a1),a1=a1 in a2,a1=cbb00000074(a1),a1=cbb00000075(a3),a2=cbb00000076(a3),a1=a1>a2,a2=cbb00000077(a1));}else if(s_cbb<oo3289){if(s_cbb<oo3290){a1=cbb0000003781(cbbb);a2=cbb0000003782(a1);a1=a1<=a2;a5=cbb0000003783(a1);a5=cbb0000003784(a3);a1=cbb0000003785(typeof a5);}else{a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008435(a1);let g=a7;all[g]=function(){let g2=new 
cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008436(a4);a2=cbb0000008437(a1);a1=a1>=a2;a2=cbb0000008438(a1);}}else{a1=cbb0000004743(a3);a2=cbb0000004744(a1);a2.push(a1);a1=cbb0000004745(a2);a1=cbb0000004746(a3);a2=cbb0000004747(a4);a1=a1%a2;a5=cbb0000004748(a1);}}else if(s_cbb<oo3302){if(s_cbb<oo3303){if(s_cbb<oo3304){if(s_cbb<oo3305){s_cbb<oo3306?(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004143(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004144(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004145(a4);}(),a1=cbb0000004146(a2),a2=cbb0000004147(a3),a1=a1&a2,a3=cbb0000004148(a1)):(a2=cbb0000007365([]),all=cbbb,a1=cbb0000007366(cbbb));}else if(s_cbb<oo3314){a1=cbb000000706(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000707(a1)):a9=oo4829;;a1=cbb000000708({});}else{a1=cbb0000007067(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a1=cbb0000007068(a6);a2=cbb0000007069(a7);a1=a1|a2;a8=cbb0000007070(a1);}}else 
s_cbb<oo3317?s_cbb<oo3318?(a1=cbb0000002088(a3),a2=cbb0000002089(a3),a1=a1>a2,a2=cbb0000002090(a1),a1=cbb0000002091({})):(a1=cbb0000008262(a3),a2=cbb0000008263(a1),a1=a1<a2,a1=cbb0000008264(a1),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008265(a2);cbbb[a9]=argsList[a2];}}()):(a2=cbb000000176([]),a1=cbb000000177(),a2=cbb000000178(),a1=a2!=a1,cbb000000179(a1));}else if(s_cbb<oo3320){if(s_cbb<oo3321){s_cbb<oo3322?(a2=cbb0000001983([]),a1=cbb0000001984(a2),a2=cbb0000001985(a1),a1=a1/a2,a3=cbb0000001986(a1)):(a1=cbb0000004006(a4),a2=cbb0000004007(a5),a1=a1-a2,a6=cbb0000004008(a1),a2=cbb0000004009(allthis));}else if(s_cbb<oo3323){a1=cbb0000001030(a5);a2=cbb0000001031(a6);a1=a2*a1;a7=cbb0000001032(a1);return;}else{a1=cbb0000001288(a5);a2=cbb0000001289(a6);a1=a2!==a1;a1=cbb0000001290(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else 
s_cbb<oo3330?s_cbb<oo3331?(function(){debugger;}(),a1=cbb000000584(a5),a2=cbb000000585(a3),a3=cbb000000586(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(a1=cbb000000330(a5),a2=cbb000000331(a9),a1=a2<a1,a6=cbb000000332(a1),a1=cbb000000333(),a2=cbb000000334(),a1=a2!=a1,cbb000000335(a1)):(a1=shuz[start++],a2=cbb0000007896(a1),a1=cbb0000007897(a2),a2=cbb0000007898(a1),a1=a2==a1,a4=cbb0000007899(a1));}else if(s_cbb<oo3332){if(s_cbb<oo3333){if(s_cbb<oo3334){s_cbb<oo3335?(a1=cbb0000008093(a5),a2=cbb0000008094(a6),a1=a2!==a1,a1=cbb0000008095(a1),a1=shuz[start++],a2=cbb0000008096(constantPool[a1])):(a1=cbb000000354(a2),a2=cbb000000355(a1),a1=a2-a1,a1=cbb000000356(a1),a1=cbb000000357(a2),a2=cbb000000358(a1),a1=a2-a1,a1=cbb000000359(a1));}else if(s_cbb<oo3336){a1=cbb000000989(a3);a2=cbb000000990(a1);a1=a1<a2;a1=cbb000000991(a1);return;}else{(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003848()):a4.splice(oo4806,oo4806,cbb0000003849());}a1=cbb0000003850(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}})();a5=cbb0000003851(a1);a2=cbb0000003852(void a5);}}else if(s_cbb<oo3346){if(s_cbb<oo3347){a1=cbb0000008393({});a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008394(a1);let g=a7;all[g]=function(){let g2=new 
cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}else{a1=cbb0000001593(a4);a2=cbb0000001594(a1);a1=a1>=a2;a2=cbb0000001595(a1);(function(){a1=cbb0000001596(a2);a2=cbb0000001597(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001598(a1);})();}}else{a1=cbb000000657(a5);a2=cbb000000658(a3);a3=cbb000000659(a1);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a1=cbb000000660(a3);a2=cbb000000661(a1);a1=a1<a2;a1=cbb000000662(a1);}}else s_cbb<oo3359?s_cbb<oo3360?s_cbb<oo3361?(a1=cbb0000004487(a4),a2=cbb0000004488(a5),a1=a1-a2,a6=cbb0000004489(a1),a5=cbb0000004490(a3),a1=cbb0000004491(~a5)):(a1=cbb0000007822(),a2=cbb0000007823(),a1=a2!=a1,cbb0000007824(a1),a1=cbb0000007825(a4),a2=cbb0000007826(a5),a1=a1-a2,a6=cbb0000007827(a1)):(a1=cbb0000005328(cbbb),a2=cbb0000005329(a1),a1=a1<=a2,a5=cbb0000005330(a1),a5=cbb0000005331(a3),a4=cbb0000005332(!a5)):s_cbb<oo3362?s_cbb<oo3363?(a1=cbb000000336(),a2=cbb000000337(),a1=a2!=a1,cbb000000338(a1),a1=cbb000000339(a5),a2=cbb000000340(a6),a1=a2*a1,a7=cbb000000341(a1)):(a1=cbb0000007992(a3),a2=cbb0000007993(a4),a1=a1>>a2,a5=cbb0000007994(a1),a1=shuz[start++],a2=cbb0000007995(a1)):(a1=cbb0000004577(a2),a2=cbb0000004578(a1),a1=a2==a1,a4=cbb0000004579(a1),a5=cbb0000004580(a3),a1=cbb0000004581(~a5));}else 
if(s_cbb<oo3364){if(s_cbb<oo3365){if(s_cbb<oo3366){if(s_cbb<oo3367){if(s_cbb<oo3368){if(s_cbb<oo3369){if(s_cbb<oo3370){if(s_cbb<oo3371){if(s_cbb<oo3372){if(s_cbb<oo3373){a1=cbb0000002007({});a1=cbb0000002008(a3);a2=cbb0000002009(a4);a1=a1%a2;a5=cbb0000002010(a1);}else{a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008415(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008416(a2);a2=cbb0000008417(a1);a1=a2==a1;a4=cbb0000008418(a1);}}else if(s_cbb<oo3385){(function(){debugger;})();a2=cbb0000001341(allthis);}else{a5=cbb0000001048(a1);a2=cbb0000001049(void a5);return;}}else s_cbb<oo3386?s_cbb<oo3387?(a1=cbb0000002758(a2),a2=cbb0000002759(a1),a1=a1/a2,a3=cbb0000002760(a1),a5=cbb0000002761(a1),a2=cbb0000002762(void a5)):(a1=shuz[start++],a2=cbb0000007514(a3),a3=cbb0000007515(a1),a2[constantPool[a1]]=a3,a1=cbb0000007516(a2),a2=cbb0000007517(a1),a1=a2==a1,a4=cbb0000007518(a1)):(a1=cbb0000003388(a3),a2=cbb0000003389(a4),a1=a1%a2,a5=cbb0000003390(a1),a1=cbb0000003391(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828);}else 
s_cbb<oo3389?s_cbb<oo3390?s_cbb<oo3391?(a2=cbb0000001991([]),a1=cbb0000001992(a5),a2=cbb0000001993(a6),a1=a2*a1,a7=cbb0000001994(a1)):(a1=cbb0000006806(a5),a2=cbb0000006807(a6),a1=a2*a1,a7=cbb0000006808(a1),a1=cbb0000006809(a6),a2=cbb0000006810(a7),a1=a1|a2,a8=cbb0000006811(a1)):s_cbb<oo3392?(a1=cbb0000004051(),a2=cbb0000004052(),a1=a2!=a1,cbb0000004053(a1),a2=cbb0000004054(allthis)):(a1=cbb0000007703(a5),a2=cbb0000007704(a6),a1=a2!==a1,a1=cbb0000007705(a1),a1=cbb0000007706(a2),a2=cbb0000007707(a1),a1=a1+a2,a1=cbb0000007708(a1)):s_cbb<oo3393?s_cbb<oo3394?(a1=cbb0000007713(a2),a2=cbb0000007714(a1),a1=a1+a2,a1=cbb0000007715(a1),a1=cbb0000007716(cbbb),a2=cbb0000007717(a1),a1=a1<=a2,a5=cbb0000007718(a1)):(a1=cbb000000837(a5),a2=cbb000000838(a9),a1=a2<a1,a6=cbb000000839(a1),a1=cbb000000840(a3),a2=cbb000000841(a4),a1=a1>>a2,a5=cbb000000842(a1)):(a1=cbb0000007663(a5),a2=cbb0000007664(a6),a1=a2*a1,a7=cbb0000007665(a1),a1=cbb0000007666(a2),a2=cbb0000007667(a1),a1=a1+a2,a1=cbb0000007668(a1));}else if(s_cbb<oo3395){if(s_cbb<oo3396){s_cbb<oo3397?s_cbb<oo3398?(a1=cbb0000001342(a5),a2=cbb0000001343(a6),a1=a2*a1,a7=cbb0000001344(a1),function(){debugger;}()):(a1=cbb000000870(a2),a2=cbb000000871(a3),a1=a1&a2,a3=cbb000000872(a1),a1=cbb000000873(a5),a2=cbb000000874(a9),a1=a2<a1,a6=cbb000000875(a1)):s_cbb<oo3399?(function(){a1=shuz[start++];a3=cbb0000001770(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001771(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001772(a4);}(),a1=cbb0000001773(a2),a2=cbb0000001774(a1),a1=a2==a1,a4=cbb0000001775(a1)):(a1=cbb000000495(a2),a2=cbb000000496(a1),a1=a2-a1,a1=cbb000000497(a1),a1=cbb000000498(a3),a2=cbb000000499(a1),a1=a1<a2,a1=cbb000000500(a1));}else 
if(s_cbb<oo3403){if(s_cbb<oo3404){a1=cbb0000002913(a3);a2=cbb0000002914(a1);a3=delete a2[a1];a1=cbb0000002915(a3);a1=cbb0000002916(a5);a2=cbb0000002917(a6);a1=a2*a1;a7=cbb0000002918(a1);}else{a1=cbb0000001270(a6);a2=cbb0000001271(a1);a1=a1>>>a2;a2=cbb0000001272(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else{a1=cbb0000002863(a3);a2=cbb0000002864(a1);a3=delete a2[a1];a1=cbb0000002865(a3);a1=cbb0000002866(a3);a2=cbb0000002867(a3);a1=a1>a2;a2=cbb0000002868(a1);}}else 
s_cbb<oo3411?s_cbb<oo3412?s_cbb<oo3413?(a1=cbb0000005052({}),a1=cbb0000005053(a3),a2=cbb0000005054(a3),a1=a1>a2,a2=cbb0000005055(a1)):(a1=shuz[start++],a2=cbb0000008057(constantPool[a1]),a1=cbb0000008058(a5),a2=cbb0000008059(a9),a1=a2<a1,a6=cbb0000008060(a1)):s_cbb<oo3414?(a1=cbb0000006951(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a1=cbb0000006952(a2),a2=cbb0000006953(a1),a1=a2===a1,a3=cbb0000006954(a1)):(a1=cbb0000006891(a2),a2=cbb0000006892(a1),a1=a2-a1,a1=cbb0000006893(a1),a1=cbb0000006894(a5),a2=cbb0000006895(a6),a1=a2*a1,a7=cbb0000006896(a1)):s_cbb<oo3416?s_cbb<oo3417?(a1=cbb0000007045(a6),a2=cbb0000007046(a7),a1=a1|a2,a8=cbb0000007047(a1),a1=cbb0000007048(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828):(a1=cbb0000008225(a5),a2=cbb0000008226(a6),a1=a2!==a1,a1=cbb0000008227(a1),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008228(a2);cbbb[a9]=argsList[a2];}}()):(a1=cbb0000005282(a3),a2=cbb0000005283(a1),a1=a1<a2,a1=cbb0000005284(a1),a1=cbb0000005285(cbbb),a2=cbb0000005286(a1),a1=a1<=a2,a5=cbb0000005287(a1));}else if(s_cbb<oo3420){if(s_cbb<oo3421){if(s_cbb<oo3422){if(s_cbb<oo3423){s_cbb<oo3424?(a1=cbb0000005255(a4),a2=cbb0000005256(a1),a1=a1>=a2,a2=cbb0000005257(a1),a1=cbb0000005258(a2),a2=cbb0000005259(a1),a1=a2-a1,a1=cbb0000005260(a1)):(a1=shuz[start++],a2=cbb0000007529(a3),a3=cbb0000007530(a1),a2[constantPool[a1]]=a3,a5=cbb0000007531(a3),a4=cbb0000007532(!a5));}else if(s_cbb<oo3425){a1=cbb0000003913(a5);a2=cbb0000003914(a9);a1=a2<a1;a6=cbb0000003915(a1);(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003916()):a4.splice(oo4806,oo4806,cbb0000003917());}a1=cbb0000003918(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else 
if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}})();}else{a1=cbb0000003515(a6);a2=cbb0000003516(a7);a1=a1|a2;a8=cbb0000003517(a1);a1=cbb0000003518(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003519(a1)):a9=oo4829;;}}else s_cbb<oo3436?s_cbb<oo3437?(a1=cbb0000003651({}),a5=cbb0000003652(a2),a2=cbb0000003653(-a5)):(a1=cbb0000005288(a6),a2=cbb0000005289(a7),a1=a1|a2,a8=cbb0000005290(a1),a1=cbb0000005291(cbbb),a2=cbb0000005292(a1),a1=a1<=a2,a5=cbb0000005293(a1)):(a1=cbb0000007475(cbbb),a2=cbb0000007476(a1),a1=a1<=a2,a5=cbb0000007477(a1),a1=shuz[start++],a2=cbb0000007478(a3),a3=cbb0000007479(a1),a2[constantPool[a1]]=a3);}else s_cbb<oo3438?s_cbb<oo3439?s_cbb<oo3440?(a1=cbb0000004853(a2),a2=cbb0000004854(a1),a1=a2==a1,a4=cbb0000004855(a1),a1=cbb0000004856(a3),a2=cbb0000004857(a2),a1=a1<<a2,a1=cbb0000004858(a1)):(function(){i=[];a2=cbb0000002597(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002598(a4),a2=cbb0000002599(a5),a1=a1-a2,a6=cbb0000002600(a1)):s_cbb<oo3441?(a1=cbb0000004653(a3),a2=cbb0000004654(a1),a2.push(a1),a1=cbb0000004655(a2),a1=cbb0000004656(a4),a2=cbb0000004657(a5),a1=a1-a2,a6=cbb0000004658(a1)):(a1=cbb0000006840(a2),a2=cbb0000006841(a1),a1=a2==a1,a4=cbb0000006842(a1),a1=cbb0000006843(a5),a2=cbb0000006844(a6),a1=a2*a1,a7=cbb0000006845(a1)):s_cbb<oo3442?s_cbb<oo3443?(a1=cbb0000001563(a3),a2=cbb0000001564(a4),a1=a1>>a2,a5=cbb0000001565(a1),function(){a1=cbb0000001566(a2);a2=cbb0000001567(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001568(a1);}()):(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008270(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008271(a2),a2=cbb0000008272(a1),a1=a1+a2,a1=cbb0000008273(a1)):(a1=cbb0000001438(a5),a2=cbb0000001439(a6),a1=a2*a1,a7=cbb0000001440(a1),a1=cbb0000001441(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820);}else if(s_cbb<oo3446){s_cbb<oo3447?s_cbb<oo3448?s_cbb<oo3449?(a5=cbb0000003804(a3),a1=cbb0000003805(typeof 
a5),a1=cbb0000003806(a2),a2=cbb0000003807(a1),a1=a2===a1,a3=cbb0000003808(a1)):(a1=cbb0000005591(a2),a2=cbb0000005592(a3),a1=a1&a2,a3=cbb0000005593(a1),a1=cbb0000005594(),a2=cbb0000005595(),a1=a2!=a1,cbb0000005596(a1)):s_cbb<oo3450?(a1=cbb000000417(a2),a2=cbb000000418(a1),a1=a2-a1,a1=cbb000000419(a1),a1=cbb000000420(a4),a2=cbb000000421(a5),a1=a1-a2,a6=cbb000000422(a1)):(a5=cbb0000002371(a3),a4=cbb0000002372(!a5),a1=cbb0000002373(a2),a2=cbb0000002374(a1),a1=a2-a1,a1=cbb0000002375(a1)):s_cbb<oo3451?s_cbb<oo3452?(a5=cbb0000002351(a3),a4=cbb0000002352(!a5),a1=cbb0000002353(a2),a2=cbb0000002354(a3),a1=a1&a2,a3=cbb0000002355(a1)):(a1=cbb000000634(a5),a2=cbb000000635(a3),a3=cbb000000636(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb000000637(),a2=cbb000000638(),a1=a2!=a1,cbb000000639(a1)):(a1=cbb000000389(a5),a2=cbb000000390(a9),a1=a2<a1,a6=cbb000000391(a1),a1=cbb000000392(a2),a2=cbb000000393(a1),a1=a2-a1,a1=cbb000000394(a1));}else if(s_cbb<oo3453){if(s_cbb<oo3454){if(s_cbb<oo3455){a2=cbb000000985(allthis);return;}else{a1=cbb0000006390(a2);a2=cbb0000006391(a1);a1=a1/a2;a3=cbb0000006392(a1);a1=cbb0000006393({});}}else s_cbb<oo3456?(a1=cbb0000004906({}),a1=cbb0000004907(a2),a2=cbb0000004908(a1),a1=a2==a1,a4=cbb0000004909(a1)):(a2=cbb0000004495([]),a5=cbb0000004496(a3),a1=cbb0000004497(~a5));}else s_cbb<oo3457?s_cbb<oo3458?(a1=cbb0000002881(a3),a2=cbb0000002882(a1),a3=delete a2[a1],a1=cbb0000002883(a3),a2=cbb0000002884([])):(a1=cbb0000001335(a3),a2=cbb0000001336(a4),a1=a1>>a2,a5=cbb0000001337(a1),function(){debugger;}()):(a1=cbb0000002058(a2),a2=cbb0000002059(a1),a1=a2-a1,a1=cbb0000002060(a1),a1=cbb0000002061({}));}else 
if(s_cbb<oo3459){if(s_cbb<oo3460){s_cbb<oo3461?s_cbb<oo3462?s_cbb<oo3463?s_cbb<oo3464?(a1=cbb0000006587(a3),a2=cbb0000006588(a4),a1=a1^a2,a5=cbb0000006589(a1),a1=cbb0000006590(a2),a2=cbb0000006591(a1),a1=a1/a2,a3=cbb0000006592(a1)):(a1=cbb0000005692(a6),a2=cbb0000005693(a1),a1=a1>>>a2,a2=cbb0000005694(a1),a1=cbb0000005695(cbbb),a2=cbb0000005696(a1),a1=a1<=a2,a5=cbb0000005697(a1)):s_cbb<oo3465?(a1=cbb0000003158(a2),a2=cbb0000003159(a1),a1=a2===a1,a3=cbb0000003160(a1),a1=cbb0000003161({})):(a1=cbb0000003292(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a1=cbb0000003293(a2),a2=cbb0000003294(a1),a1=a2===a1,a3=cbb0000003295(a1)):s_cbb<oo3467?s_cbb<oo3468?(function(){debugger;}(),a1=cbb000000849(a5),a2=cbb000000850(a9),a1=a2<a1,a6=cbb000000851(a1)):(a1=cbb0000005214(a4),a2=cbb0000005215(a1),a1=a1>=a2,a2=cbb0000005216(a1),a1=cbb0000005217(a3),a2=cbb0000005218(a4),a1=a1^a2,a5=cbb0000005219(a1)):(a1=cbb0000005153(a4),a2=cbb0000005154(a1),a1=a1>=a2,a2=cbb0000005155(a1),a1=cbb0000005156(a3),a2=cbb0000005157(a4),a1=a1%a2,a5=cbb0000005158(a1)):s_cbb<oo3469?s_cbb<oo3470?s_cbb<oo3471?(a1=cbb000000614(a2),a2=cbb000000615(a1),a1=a2===a1,a3=cbb000000616(a1),a1=cbb000000617(a5),a2=cbb000000618(a3),a3=cbb000000619(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(a1=cbb0000002695(a4),a2=cbb0000002696(a1),a1=a1>=a2,a2=cbb0000002697(a1),a5=cbb0000002698(a1),a2=cbb0000002699(void 
a5)):s_cbb<oo3472?(a1=cbb0000007026(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a1=cbb0000007027(a2),a2=cbb0000007028(a1),a1=a2==a1,a4=cbb0000007029(a1)):(a1=cbb0000002040({}),a1=cbb0000002041(a4),a2=cbb0000002042(a1),a1=a1>=a2,a2=cbb0000002043(a1)):s_cbb<oo3474?s_cbb<oo3475?(a5=cbb0000003631(a2),a2=cbb0000003632(-a5),a1=cbb0000003633(a3),a2=cbb0000003634(a4),a1=a1>>a2,a5=cbb0000003635(a1)):(function(){debugger;}(),a1=cbb0000001300(a5),a2=cbb0000001301(a6),a1=a2*a1,a7=cbb0000001302(a1)):(a1=cbb0000002559(a3),a2=cbb0000002560(a4),a1=a1%a2,a5=cbb0000002561(a1),function(){i=[];a2=cbb0000002562(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}());}else if(s_cbb<oo3476){if(s_cbb<oo3477){if(s_cbb<oo3478){if(s_cbb<oo3479){a1=cbb0000001000(a2);a2=cbb0000001001(a1);a1=a1/a2;a3=cbb0000001002(a1);return;}else{(function(){i=[];a2=cbb0000002607(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;})();a1=cbb0000002608({});}}else s_cbb<oo3480?(a5=cbb0000002395(a3),a4=cbb0000002396(!a5),a1=cbb0000002397(a2),a2=cbb0000002398(a3),a1=a1&a2,a3=cbb0000002399(a1)):(a1=cbb0000003865(a3),a2=cbb0000003866(a2),a1=a1<<a2,a1=cbb0000003867(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003868()):a4.splice(oo4806,oo4806,cbb0000003869());}a1=cbb0000003870(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}());}else s_cbb<oo3490?s_cbb<oo3491?(a1=cbb0000005018(a6),a2=cbb0000005019(a7),a1=a1|a2,a8=cbb0000005020(a1),a1=cbb0000005021(a3),a2=cbb0000005022(a3),a1=a1>a2,a2=cbb0000005023(a1)):(a1=cbb0000005304(cbbb),a2=cbb0000005305(a1),a1=a1<=a2,a5=cbb0000005306(a1),a1=cbb0000005307(cbbb),a2=cbb0000005308(a1),a1=a1<=a2,a5=cbb0000005309(a1)):(a1=cbb0000006650(a3),a2=cbb0000006651(a4),a1=a1%a2,a5=cbb0000006652(a1),a1=cbb0000006653(a4),a2=cbb0000006654(a1),a1=a1>=a2,a2=cbb0000006655(a1));}else 
s_cbb<oo3492?s_cbb<oo3493?s_cbb<oo3494?(a1=cbb0000008006(a3),a2=cbb0000008007(a1),a1=a1<a2,a1=cbb0000008008(a1),a1=shuz[start++],a2=cbb0000008009(constantPool[a1])):(a1=cbb000000429(a2),a2=cbb000000430(a1),a1=a2-a1,a1=cbb000000431(a1),a1=cbb000000432({})):s_cbb<oo3495?(a1=cbb0000003703(a3),a2=cbb0000003704(a1),a1=a1<a2,a1=cbb0000003705(a1),a5=cbb0000003706(a3),a1=cbb0000003707(typeof a5)):(a1=cbb0000004378(a2),a2=cbb0000004379(a1),a1=a2===a1,a3=cbb0000004380(a1),a1=cbb0000004381(a2),a2=cbb0000004382(a3),a3=cbb0000004383(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004384(a3)):s_cbb<oo3496?s_cbb<oo3497?(a1=cbb0000007609(a2),a2=cbb0000007610(a1),a1=a1+a2,a1=cbb0000007611(a1),a1=cbb0000007612(a6),a2=cbb0000007613(a7),a1=a1|a2,a8=cbb0000007614(a1)):(a1=cbb0000002555(a2),a2=cbb0000002556(a3),a1=a1&a2,a3=cbb0000002557(a1),function(){i=[];a2=cbb0000002558(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}()):(a1=cbb0000006695(a3),a2=cbb0000006696(a4),a1=a1%a2,a5=cbb0000006697(a1),a1=cbb0000006698({}));}else 
if(s_cbb<oo3498){if(s_cbb<oo3499){s_cbb<oo3500?s_cbb<oo3501?s_cbb<oo3502?(function(){a1=cbb0000001641(a2);a2=cbb0000001642(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001643(a1);}(),a2=cbb0000001644([])):(a1=shuz[start++],a2=cbb0000008077(constantPool[a1]),a1=cbb0000008078(a3),a2=cbb0000008079(a4),a1=a1>>a2,a5=cbb0000008080(a1)):s_cbb<oo3503?(a1=cbb0000007639(a2),a2=cbb0000007640(a1),a1=a1+a2,a1=cbb0000007641(a1),a1=cbb0000007642(a4),a2=cbb0000007643(a1),a1=a1>=a2,a2=cbb0000007644(a1)):(a5=cbb0000001972(a3),a4=cbb0000001973(!a5),a2=cbb0000001974([])):s_cbb<oo3504?s_cbb<oo3505?(a1=cbb0000005007(a3),a2=cbb0000005008(a3),a1=a1>a2,a2=cbb0000005009(a1),a5=cbb0000005010(a3),a4=cbb0000005011(!a5)):(a1=cbb0000004731(a3),a2=cbb0000004732(a1),a2.push(a1),a1=cbb0000004733(a2),a1=cbb0000004734(a3),a2=cbb0000004735(a4),a1=a1^a2,a5=cbb0000004736(a1)):(a1=cbb0000006232(a5),a2=cbb0000006233(a9),a1=a2<a1,a6=cbb0000006234(a1),a1=cbb0000006235(a3),a2=cbb0000006236(a2),a1=a1<<a2,a1=cbb0000006237(a1));}else if(s_cbb<oo3506){if(s_cbb<oo3507){if(s_cbb<oo3508){a5=cbb0000003679(a3);a4=cbb0000003680(!a5);a5=cbb0000003681(a3);a1=cbb0000003682(typeof a5);}else{a1=cbb0000001027(a3);a2=cbb0000001028(a1);a1=a1<a2;a1=cbb0000001029(a1);return;}}else s_cbb<oo3509?(a1=shuz[start++],start+=a1,a1=cbb0000001535(a5),a2=cbb0000001536(a6),a1=a2*a1,a7=cbb0000001537(a1)):(a1=cbb0000006638(a3),a2=cbb0000006639(a4),a1=a1>>a2,a5=cbb0000006640(a1),a1=cbb0000006641(a3),a2=cbb0000006642(a4),a1=a1%a2,a5=cbb0000006643(a1));}else s_cbb<oo3510?s_cbb<oo3511?(a1=shuz[start++],a2=cbb0000007972(a1),a1=cbb0000007973(a6),a2=cbb0000007974(a7),a1=a1|a2,a8=cbb0000007975(a1)):(a1=cbb0000005322(cbbb),a2=cbb0000005323(a1),a1=a1<=a2,a5=cbb0000005324(a1),a1=cbb0000005325(a3),a2=cbb0000005326(a3),a1=a1>a2,a2=cbb0000005327(a1)):(a1=cbb0000006202(a3),a2=cbb0000006203(a2),a1=a1<<a2,a1=cbb0000006204(a1),a1=cbb0000006205(a2),a2=cbb0000006206(a3),a1=a1&a2,a3=cbb0000006207(a1));}else 
if(s_cbb<oo3512){if(s_cbb<oo3513){if(s_cbb<oo3514){s_cbb<oo3515?(a1=cbb0000003015(a5),a2=cbb0000003016(a6),a1=a2!==a1,a1=cbb0000003017(a1),a1=cbb0000003018(),a2=cbb0000003019(),a1=a2!=a1,cbb0000003020(a1)):(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003821()):a4.splice(oo4806,oo4806,cbb0000003822());}a1=cbb0000003823(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a2=cbb0000003824(allthis));}else if(s_cbb<oo3525){a1=cbb0000008345(a2);a2=cbb0000008346(a1);a1=a1+a2;a1=cbb0000008347(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008348(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}else{a1=cbb000000599(a5);a2=cbb000000600(a3);a3=cbb000000601(a1);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a5=cbb000000602(a1);a2=cbb000000603(void a5);}}else 
s_cbb<oo3537?s_cbb<oo3538?(a1=cbb0000004010(a2),a2=cbb0000004011(a1),a1=a2===a1,a3=cbb0000004012(a1),a2=cbb0000004013(allthis)):(a1=cbb0000005555(a2),a2=cbb0000005556(a3),a1=a1&a2,a3=cbb0000005557(a1),a1=cbb0000005558(a5),a2=cbb0000005559(a6),a1=a2*a1,a7=cbb0000005560(a1)):(function(){debugger;}(),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004116(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004117(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004118(a4);}());}else s_cbb<oo3546?s_cbb<oo3547?s_cbb<oo3548?(a1=cbb0000005091(a4),a2=cbb0000005092(a1),a1=a1>=a2,a2=cbb0000005093(a1),a1=cbb0000005094(a2),a2=cbb0000005095(a1),a1=a2==a1,a4=cbb0000005096(a1)):(a1=shuz[start++],a2=cbb0000008049(constantPool[a1]),a1=cbb0000008050(a2),a2=cbb0000008051(a1),a1=a2==a1,a4=cbb0000008052(a1)):(a1=cbb0000004725(a2),a2=cbb0000004726(a3),a1=a1&a2,a3=cbb0000004727(a1),a1=cbb0000004728(a3),a2=cbb0000004729(a1),a2.push(a1),a1=cbb0000004730(a2)):s_cbb<oo3549?s_cbb<oo3550?(a1=cbb0000007921(a5),a2=cbb0000007922(a6),a1=a2!==a1,a1=cbb0000007923(a1),a1=shuz[start++],a2=cbb0000007924(a1)):(a1=shuz[start++],a2=cbb0000008089(constantPool[a1]),a1=cbb0000008090(a2),a2=cbb0000008091(a1),a1=a2==a1,a4=cbb0000008092(a1)):(a1=cbb0000006735(a3),a2=cbb0000006736(a4),a1=a1%a2,a5=cbb0000006737(a1),a1=cbb0000006738(a4),a2=cbb0000006739(a1),a1=a1>=a2,a2=cbb0000006740(a1));}else 
if(s_cbb<oo3551){if(s_cbb<oo3552){if(s_cbb<oo3553){if(s_cbb<oo3554){if(s_cbb<oo3555){s_cbb<oo3556?s_cbb<oo3557?(a1=cbb0000006599(a6),a2=cbb0000006600(a1),a1=a1>>>a2,a2=cbb0000006601(a1),a1=cbb0000006602(a3),a2=cbb0000006603(a4),a1=a1^a2,a5=cbb0000006604(a1)):(a1=cbb0000001162(a4),a2=cbb0000001163(a5),a1=a1-a2,a6=cbb0000001164(a1),j=cbb0000001165(a1),j2=cbb0000001166(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):s_cbb<oo3558?(a1=cbb0000005952(a6),a2=cbb0000005953(a7),a1=a1|a2,a8=cbb0000005954(a1),a1=cbb0000005955(a2),a2=cbb0000005956(a1),a1=a2===a1,a3=cbb0000005957(a1)):(a1=cbb0000005197(a2),a2=cbb0000005198(a1),a1=a1+a2,a1=cbb0000005199(a1),a1=cbb0000005200(a4),a2=cbb0000005201(a1),a1=a1>=a2,a2=cbb0000005202(a1));}else if(s_cbb<oo3559){s_cbb<oo3560?(a1=cbb0000006747(a3),a2=cbb0000006748(a2),a1=a1<<a2,a1=cbb0000006749(a1),a1=cbb0000006750(a3),a2=cbb0000006751(a4),a1=a1%a2,a5=cbb0000006752(a1)):(a1=cbb0000004632(a3),a2=cbb0000004633(a1),a2.push(a1),a1=cbb0000004634(a2),function(){debugger;}());}else{a1=cbb0000003489(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003490(a1)):a9=oo4829;;a1=cbb0000003491(a3);a2=cbb0000003492(a3);a1=a1>a2;a2=cbb0000003493(a1);}}else s_cbb<oo3562?s_cbb<oo3563?s_cbb<oo3564?(a1=cbb0000005061(a6),a2=cbb0000005062(a1),a1=a1>>>a2,a2=cbb0000005063(a1),a1=cbb0000005064(a3),a2=cbb0000005065(a3),a1=a1>a2,a2=cbb0000005066(a1)):(a1=cbb0000004254(a4),a2=cbb0000004255(a1),a1=a1>=a2,a2=cbb0000004256(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004257(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004258(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new 
a3(...args);offnew=oo4806;a1=cbb0000004259(a4);}()):s_cbb<oo3572?(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003842()):a4.splice(oo4806,oo4806,cbb0000003843());}a1=cbb0000003844(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a1=cbb0000003845(cbbb),a2=cbb0000003846(a1),a1=a1<=a2,a5=cbb0000003847(a1)):(a1=cbb0000007840(a5),a2=cbb0000007841(a9),a1=a2<a1,a6=cbb0000007842(a1),a1=cbb0000007843(a4),a2=cbb0000007844(a5),a1=a1-a2,a6=cbb0000007845(a1)):s_cbb<oo3582?s_cbb<oo3583?(a1=cbb0000001913(a3),a2=cbb0000001914(a1),a1=a1<a2,a1=cbb0000001915(a1),a2=cbb0000001916([])):(a1=cbb0000005272(a2),a2=cbb0000005273(a1),a1=a2===a1,a3=cbb0000005274(a1),a1=cbb0000005275(cbbb),a2=cbb0000005276(a1),a1=a1<=a2,a5=cbb0000005277(a1)):(all=cbbb,a1=cbb0000007420(cbbb),a5=cbb0000007421(a3),a4=cbb0000007422(!a5));}else if(s_cbb<oo3584){if(s_cbb<oo3585){if(s_cbb<oo3586){if(s_cbb<oo3587){a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return 
a6;}else{a9=oo4829;}start=start+a4;}a1=cbb0000001279(a2);a2=cbb0000001280(a1);a1=a1+a2;a1=cbb0000001281(a1);}else{a1=cbb0000008341(a6);a2=cbb0000008342(a1);a1=a1>>>a2;a2=cbb0000008343(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008344(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else s_cbb<oo3605?(a1=cbb0000004833(a2),a2=cbb0000004834(a1),a1=a2==a1,a4=cbb0000004835(a1),a2=cbb0000004836([])):(a1=cbb000000807(a3),a2=cbb000000808(a4),a1=a1%a2,a5=cbb000000809(a1),a1=cbb000000810(a5),a2=cbb000000811(a9),a1=a2<a1,a6=cbb000000812(a1));}else s_cbb<oo3606?s_cbb<oo3607?(a5=cbb0000006621(a1),a2=cbb0000006622(void a5),a1=cbb0000006623(a3),a2=cbb0000006624(a4),a1=a1%a2,a5=cbb0000006625(a1)):(all=cbbb,a1=cbb0000007427(cbbb),a1=cbb0000007428(a5),a2=cbb0000007429(a6),a1=a2*a1,a7=cbb0000007430(a1)):(a1=cbb000000669(a5),a2=cbb000000670(a3),a3=cbb000000671(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb000000672(a2),a2=cbb000000673(a3),a1=a1&a2,a3=cbb000000674(a1));}else if(s_cbb<oo3608){s_cbb<oo3609?s_cbb<oo3610?(function(){i=[];a2=cbb0000002585(a2);for(a1 in 
a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002586(a5),a2=cbb0000002587(a9),a1=a2<a1,a6=cbb0000002588(a1)):(a1=cbb0000001839(a3),a2=cbb0000001840(a3),a1=a1>a2,a2=cbb0000001841(a1),function(){a1=shuz[start++];a3=cbb0000001842(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001843(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001844(a4);}()):s_cbb<oo3614?(a1=cbb0000006447(a6),a2=cbb0000006448(a1),a1=a1>>>a2,a2=cbb0000006449(a1),a1=cbb0000006450(a3),a2=cbb0000006451(a4),a1=a1^a2,a5=cbb0000006452(a1)):(a1=cbb0000002003({}),a1=cbb0000002004(a2),a2=cbb0000002005(a1),a1=a2-a1,a1=cbb0000002006(a1));}else if(s_cbb<oo3615){if(s_cbb<oo3616){a5=cbb0000002718(a1);a2=cbb0000002719(void a5);a1=cbb0000002720(a5);a2=cbb0000002721(a6);a1=a2*a1;a7=cbb0000002722(a1);}else{a1=cbb0000001239(a5);a2=cbb0000001240(a6);a1=a2*a1;a7=cbb0000001241(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return 
a6;}else{a9=oo4829;}start=start+a4;}}}else{a1=cbb0000006489(a3);a2=cbb0000006490(a2);a1=a1<<a2;a1=cbb0000006491(a1);a1=cbb0000006492(a3);a2=cbb0000006493(a4);a1=a1^a2;a5=cbb0000006494(a1);}}else if(s_cbb<oo3623){if(s_cbb<oo3624){s_cbb<oo3625?s_cbb<oo3626?s_cbb<oo3627?(a1=cbb0000004992(a5),a2=cbb0000004993(a6),a1=a2!==a1,a1=cbb0000004994(a1),a1=cbb0000004995(a3),a2=cbb0000004996(a3),a1=a1>a2,a2=cbb0000004997(a1)):(a1=cbb0000007314(a2),a2=cbb0000007315(a1),a1=a2===a1,a3=cbb0000007316(a1),a1=cbb0000007317(a3),a2=cbb0000007318(a1),a1=a1<a2,a1=cbb0000007319(a1)):s_cbb<oo3628?(a1=shuz[start++],a2=cbb0000008010(constantPool[a1]),a1=cbb0000008011(a5),a2=cbb0000008012(a6),a1=a2!==a1,a1=cbb0000008013(a1)):(a1=cbb000000248(),a2=cbb000000249(),a1=a2!=a1,cbb000000250(a1),a1=cbb000000251(a6),a2=cbb000000252(a1),a1=a1>>>a2,a2=cbb000000253(a1)):s_cbb<oo3629?s_cbb<oo3630?(a1=shuz[start++],a2=cbb0000007497(a3),a3=cbb0000007498(a1),a2[constantPool[a1]]=a3,a1=cbb0000007499(a4),a2=cbb0000007500(a1),a1=a1>=a2,a2=cbb0000007501(a1)):(a1=cbb0000001139(a3),a2=cbb0000001140(a1),a1=a1<a2,a1=cbb0000001141(a1),j=cbb0000001142(a1),j2=cbb0000001143(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):(a2=cbb0000004065(allthis),a1=cbb0000004066(a2),a2=cbb0000004067(a3),a1=a1&a2,a3=cbb0000004068(a1));}else if(s_cbb<oo3631){s_cbb<oo3632?s_cbb<oo3633?(a1=cbb0000006950(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,function(){debugger;}()):(a1=cbb0000003343(a5),a2=cbb0000003344(a6),a1=a2*a1,a7=cbb0000003345(a1),a1=cbb0000003346(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):s_cbb<oo3636?(a5=cbb0000002417(a3),a4=cbb0000002418(!a5),a1=cbb0000002419(a2),a2=cbb0000002420(a1),a1=a1+a2,a1=cbb0000002421(a1)):(a1=cbb0000003312(cbbb),a2=cbb0000003313(a1),a1=a1<=a2,a5=cbb0000003314(a1),a1=cbb0000003315(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828);}else 
if(s_cbb<oo3638){s_cbb<oo3639?(a1=cbb0000008286(a3),a2=cbb0000008287(a3),a1=a1>a2,a2=cbb0000008288(a1),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008289(a2);cbbb[a9]=argsList[a2];}}()):(a1=cbb0000001077(a2),a2=cbb0000001078(a1),a1=a2==a1,a4=cbb0000001079(a1),j=cbb0000001080(a1),j2=cbb0000001081(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1);}else{a1=cbb0000003449(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003450(a1)):a9=oo4829;;a1=cbb0000003451(a4);a2=cbb0000003452(a5);a1=a1-a2;a6=cbb0000003453(a1);}}else s_cbb<oo3642?s_cbb<oo3643?s_cbb<oo3644?s_cbb<oo3645?(a1=cbb0000005627(a3),a2=cbb0000005628(a2),a1=a1<<a2,a1=cbb0000005629(a1),a1=cbb0000005630(a6),a2=cbb0000005631(a1),a1=a1>>>a2,a2=cbb0000005632(a1)):(a1=cbb0000003871(a2),a2=cbb0000003872(a1),a1=a1+a2,a1=cbb0000003873(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003874()):a4.splice(oo4806,oo4806,cbb0000003875());}a1=cbb0000003876(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):s_cbb<oo3655?(a1=cbb0000005147(a4),a2=cbb0000005148(a1),a1=a1>=a2,a2=cbb0000005149(a1),a1=cbb0000005150(a3),a2=cbb0000005151(a2),a1=a1<<a2,a1=cbb0000005152(a1)):(a1=cbb0000001365(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a1=cbb0000001366(a3),a2=cbb0000001367(a2),a1=a1<<a2,a1=cbb0000001368(a1)):s_cbb<oo3657?s_cbb<oo3658?(a1=cbb0000003190(a2),a2=cbb0000003191(a1),a1=a1/a2,a3=cbb0000003192(a1),a1=cbb0000003193(a2),a2=cbb0000003194(a1),a1=a2===a1,a3=cbb0000003195(a1)):(a5=cbb0000007936(a1),a2=cbb0000007937(void 
a5),a1=shuz[start++],a2=cbb0000007938(a1)):(a1=shuz[start++],a2=cbb0000008111(constantPool[a1]),a1=cbb0000008112(),a2=cbb0000008113(),a1=a2!=a1,cbb0000008114(a1)):s_cbb<oo3659?s_cbb<oo3660?s_cbb<oo3661?(a1=cbb0000005703(a6),a2=cbb0000005704(a1),a1=a1>>>a2,a2=cbb0000005705(a1),a1=cbb0000005706(a3),a2=cbb0000005707(a4),a1=a1%a2,a5=cbb0000005708(a1)):(a1=cbb0000001885(a5),a2=cbb0000001886(a9),a1=a2<a1,a6=cbb0000001887(a1),function(){a1=shuz[start++];a3=cbb0000001888(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001889(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001890(a4);}()):s_cbb<oo3665?(a5=cbb0000001391(a1),a2=cbb0000001392(void a5),a1=cbb0000001393(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820):(a1=cbb0000001394(a5),a2=cbb0000001395(a6),a1=a2*a1,a7=cbb0000001396(a1),a1=cbb0000001397(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820):s_cbb<oo3668?s_cbb<oo3669?(a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008157(a1),a2=cbb0000008158([])):(a1=cbb0000004544(a5),a2=cbb0000004545(a6),a1=a2!==a1,a1=cbb0000004546(a1),a5=cbb0000004547(a3),a1=cbb0000004548(~a5)):(a1=cbb0000003230(a2),a2=cbb0000003231(a1),a1=a2===a1,a3=cbb0000003232(a1),a1=cbb0000003233(a6),a2=cbb0000003234(a7),a1=a1|a2,a8=cbb0000003235(a1));}else if(s_cbb<oo3670){if(s_cbb<oo3671){if(s_cbb<oo3672){s_cbb<oo3673?s_cbb<oo3674?s_cbb<oo3675?(a2=cbb0000001911([]),a2=cbb0000001912([])):(a1=cbb0000001476(a2),a2=cbb0000001477(a1),a1=a1+a2,a1=cbb0000001478(a1),a1=shuz[start++],start+=a1):s_cbb<oo3676?(a1=cbb0000004415(a2),a2=cbb0000004416(a3),a3=cbb0000004417(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004418(a3),a5=cbb0000004419(a1),a2=cbb0000004420(void 
a5)):(a2=cbb0000008219(allthis),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008220(a1)):s_cbb<oo3677?s_cbb<oo3678?(function(){debugger;}(),a1=cbb0000005127(a4),a2=cbb0000005128(a1),a1=a1>=a2,a2=cbb0000005129(a1)):(a1=cbb0000007774(a4),a2=cbb0000007775(a5),a1=a1-a2,a6=cbb0000007776(a1),a1=cbb0000007777(a2),a2=cbb0000007778(a1),a1=a1/a2,a3=cbb0000007779(a1)):(a1=cbb0000002973(a5),a2=cbb0000002974(a6),a1=a2!==a1,a1=cbb0000002975(a1),function(){debugger;}());}else if(s_cbb<oo3679){s_cbb<oo3680?s_cbb<oo3681?(a1=cbb0000005136(a2),a2=cbb0000005137(a3),a1=a1&a2,a3=cbb0000005138(a1),a1=cbb0000005139(a4),a2=cbb0000005140(a1),a1=a1>=a2,a2=cbb0000005141(a1)):(a5=cbb0000005409(a3),a4=cbb0000005410(!a5),a1=cbb0000005411(cbbb),a2=cbb0000005412(a1),a1=a1<=a2,a5=cbb0000005413(a1)):s_cbb<oo3682?(a1=cbb0000006864(a5),a2=cbb0000006865(a6),a1=a2*a1,a7=cbb0000006866(a1),a1=cbb0000006867(a5),a2=cbb0000006868(a6),a1=a2*a1,a7=cbb0000006869(a1)):(a1=cbb00000090(a3),a2=cbb00000091(a1),a1=a1 in a2,a1=cbb00000092(a1),a5=cbb00000093(a1),a2=cbb00000094(void a5));}else if(s_cbb<oo3683){if(s_cbb<oo3684){a1=cbb0000002225(cbbb);a2=cbb0000002226(a1);a1=a1<=a2;a5=cbb0000002227(a1);a1=cbb0000002228(a3);a2=cbb0000002229(a4);a3=cbb0000002230(a5);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;}else{a1=cbb0000007137(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a1=cbb0000007138(a5);a2=cbb0000007139(a6);a1=a2!==a1;a1=cbb0000007140(a1);}}else{a1=cbb0000004737(a5);a2=cbb0000004738(a9);a1=a2<a1;a6=cbb0000004739(a1);a1=cbb0000004740(a3);a2=cbb0000004741(a1);a2.push(a1);a1=cbb0000004742(a2);}}else if(s_cbb<oo3686){s_cbb<oo3687?s_cbb<oo3688?s_cbb<oo3689?(a1=cbb0000005676(a5),a2=cbb0000005677(a6),a1=a2*a1,a7=cbb0000005678(a1),a1=cbb0000005679(a6),a2=cbb0000005680(a1),a1=a1>>>a2,a2=cbb0000005681(a1)):(a1=cbb000000158(a2),a2=cbb000000159(a1),a1=a2-a1,a1=cbb000000160(a1),a1=cbb000000161(a3),a2=cbb000000162(a1),a1=a1 in 
a2,a1=cbb000000163(a1)):s_cbb<oo3690?(a5=cbb0000003589(a2),a2=cbb0000003590(-a5),a1=cbb0000003591(a2),a2=cbb0000003592(a1),a1=a1/a2,a3=cbb0000003593(a1)):(a1=cbb0000006674(a3),a2=cbb0000006675(a4),a1=a1%a2,a5=cbb0000006676(a1),function(){debugger;}()):s_cbb<oo3691?s_cbb<oo3692?(a1=cbb0000004475(a5),a2=cbb0000004476(a6),a1=a2*a1,a7=cbb0000004477(a1),a5=cbb0000004478(a3),a1=cbb0000004479(~a5)):(a5=cbb0000003723(a3),a1=cbb0000003724(typeof a5),a5=cbb0000003725(a1),a2=cbb0000003726(void a5)):(a2=cbb0000007304([]),a1=cbb0000007305(a3),a2=cbb0000007306(a1),a1=a1<a2,a1=cbb0000007307(a1));}else if(s_cbb<oo3693){if(s_cbb<oo3694){s_cbb<oo3695?(a1=cbb0000003727(a3),a2=cbb0000003728(a4),a1=a1%a2,a5=cbb0000003729(a1),a5=cbb0000003730(a3),a1=cbb0000003731(typeof a5)):(a1=cbb0000003614({}),a5=cbb0000003615(a2),a2=cbb0000003616(-a5));}else if(s_cbb<oo3696){a1=cbb0000006039(a6);a2=cbb0000006040(a7);a1=a1|a2;a8=cbb0000006041(a1);a1=cbb0000006042(a5);a2=cbb0000006043(a9);a1=a2<a1;a6=cbb0000006044(a1);}else{a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}a5=cbb0000001256(a1);a2=cbb0000001257(void a5);}}else 
s_cbb<oo3703?s_cbb<oo3704?(a1=cbb0000007645(a2),a2=cbb0000007646(a1),a1=a1+a2,a1=cbb0000007647(a1),a1=cbb0000007648(a3),a2=cbb0000007649(a4),a1=a1%a2,a5=cbb0000007650(a1)):(a1=shuz[start++],a2=cbb0000007463(a3),a3=cbb0000007464(a1),a2[constantPool[a1]]=a3,a5=cbb0000007465(a1),a2=cbb0000007466(void a5)):(a1=cbb0000001635(a3),a2=cbb0000001636(a1),a1=a1<a2,a1=cbb0000001637(a1),function(){a1=cbb0000001638(a2);a2=cbb0000001639(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001640(a1);}());}else if(s_cbb<oo3705){if(s_cbb<oo3706){s_cbb<oo3707?s_cbb<oo3708?s_cbb<oo3709?(a1=cbb0000006243(a3),a2=cbb0000006244(a2),a1=a1<<a2,a1=cbb0000006245(a1),a1=cbb0000006246(a3),a2=cbb0000006247(a2),a1=a1<<a2,a1=cbb0000006248(a1)):(a1=cbb0000005876(a3),a2=cbb0000005877(a4),a1=a1>>a2,a5=cbb0000005878(a1),a1=cbb0000005879(a3),a2=cbb0000005880(a1),a1=a1<a2,a1=cbb0000005881(a1)):s_cbb<oo3710?(a1=cbb0000003574(a3),a2=cbb0000003575(a4),a1=a1%a2,a5=cbb0000003576(a1),a5=cbb0000003577(a2),a2=cbb0000003578(-a5)):(a1=cbb000000852(a5),a2=cbb000000853(a9),a1=a2<a1,a6=cbb000000854(a1),a1=cbb000000855(a2),a2=cbb000000856(a1),a1=a2-a1,a1=cbb000000857(a1)):s_cbb<oo3711?s_cbb<oo3712?(a1=cbb0000007203(a3),a2=cbb0000007204(a4),a1=a1>>a2,a5=cbb0000007205(a1),a1=cbb0000007206(a3),a2=cbb0000007207(a1),a1=a1<a2,a1=cbb0000007208(a1)):(a1=cbb0000006259(a3),a2=cbb0000006260(a4),a1=a1>>a2,a5=cbb0000006261(a1),a1=cbb0000006262(a3),a2=cbb0000006263(a2),a1=a1<<a2,a1=cbb0000006264(a1)):(function(){a1=shuz[start++];a3=cbb0000001798(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001799(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001800(a4);}(),a1=cbb0000001801(a3),a2=cbb0000001802(a2),a1=a1<<a2,a1=cbb0000001803(a1));}else 
if(s_cbb<oo3716){if(s_cbb<oo3717){if(s_cbb<oo3718){a1=cbb000000691();a2=cbb000000692();a1=a2!=a1;cbb000000693(a1);a1=cbb000000694(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000695(a1)):a9=oo4829;;}else{a1=cbb0000003328(a3);a3=shuz[start++];a1[constantPool[a3]]-=oo4828;a1=cbb0000003329(a3);a2=cbb0000003330(a4);a1=a1%a2;a5=cbb0000003331(a1);}}else s_cbb<oo3721?(a1=cbb0000006564(),a2=cbb0000006565(),a1=a2!=a1,cbb0000006566(a1),a1=cbb0000006567(a3),a2=cbb0000006568(a4),a1=a1^a2,a5=cbb0000006569(a1)):(a1=cbb0000005024(a2),a2=cbb0000005025(a1),a1=a1+a2,a1=cbb0000005026(a1),a1=cbb0000005027(a3),a2=cbb0000005028(a3),a1=a1>a2,a2=cbb0000005029(a1));}else s_cbb<oo3722?s_cbb<oo3723?(function(){a1=cbb0000002437(a2);throw a1;}(),a1=cbb0000002438(a2),a2=cbb0000002439(a3),a1=a1&a2,a3=cbb0000002440(a1)):(a1=cbb0000002475(a2),a2=cbb0000002476(a1),a1=a2===a1,a3=cbb0000002477(a1),function(){a1=cbb0000002478(a2);throw a1;}()):(a1=cbb0000007326(a3),a2=cbb0000007327(a1),a1=a1<a2,a1=cbb0000007328(a1),a1=cbb0000007329(a2),a2=cbb0000007330(a1),a1=a2-a1,a1=cbb0000007331(a1));}else if(s_cbb<oo3724){if(s_cbb<oo3725){if(s_cbb<oo3726){if(s_cbb<oo3727){a5=cbb0000003399(a1);a2=cbb0000003400(void a5);a1=cbb0000003401(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003402(a1)):a9=oo4829;;}else{(function(){a1=cbb0000002445(a2);throw a1;})();a2=cbb0000002446([]);}}else s_cbb<oo3729?(a1=cbb0000007185(a5),a2=cbb0000007186(a6),a1=a2*a1,a7=cbb0000007187(a1),a1=cbb0000007188(a3),a2=cbb0000007189(a1),a1=a1<a2,a1=cbb0000007190(a1)):(a1=cbb0000002723({}),a5=cbb0000002724(a1),a2=cbb0000002725(void a5));}else 
if(s_cbb<oo3730){if(s_cbb<oo3731){a1=cbb0000007074(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a5=cbb0000007075(a3);a4=cbb0000007076(!a5);}else{a1=cbb0000007746(a2);a2=cbb0000007747(a1);a1=a2==a1;a4=cbb0000007748(a1);a1=cbb0000007749(a4);a2=cbb0000007750(a5);a1=a1-a2;a6=cbb0000007751(a1);}}else{a1=shuz[start++];a2=cbb0000007435(a3);a3=cbb0000007436(a1);a2[constantPool[a1]]=a3;(function(){debugger;})();}}else s_cbb<oo3733?s_cbb<oo3734?s_cbb<oo3735?(a1=cbb0000005333(cbbb),a2=cbb0000005334(a1),a1=a1<=a2,a5=cbb0000005335(a1),a1=cbb0000005336(a2),a2=cbb0000005337(a1),a1=a2===a1,a3=cbb0000005338(a1)):(a5=cbb0000004582(a3),a1=cbb0000004583(~a5),a1=cbb0000004584(),a2=cbb0000004585(),a1=a2!=a1,cbb0000004586(a1)):(a1=cbb000000963(a5),a2=cbb000000964(a9),a1=a2<a1,a6=cbb000000965(a1),a1=cbb000000966(a2),a2=cbb000000967(a1),a1=a1+a2,a1=cbb000000968(a1)):s_cbb<oo3736?s_cbb<oo3737?(a1=cbb0000002748(a3),a2=cbb0000002749(a4),a1=a1^a2,a5=cbb0000002750(a1),a5=cbb0000002751(a1),a2=cbb0000002752(void a5)):(a1=cbb000000423(a3),a2=cbb000000424(a4),a1=a1>>a2,a5=cbb000000425(a1),a1=cbb000000426(a2),a2=cbb000000427(a1),a1=a2-a1,a1=cbb000000428(a1)):(a1=cbb0000003225(a2),a2=cbb0000003226(a1),a1=a2===a1,a3=cbb0000003227(a1),a5=cbb0000003228(a1),a2=cbb0000003229(void a5));}else 
if(s_cbb<oo3738){if(s_cbb<oo3739){if(s_cbb<oo3740){if(s_cbb<oo3741){s_cbb<oo3742?s_cbb<oo3743?s_cbb<oo3744?s_cbb<oo3745?(a1=cbb0000008081(a2),a2=cbb0000008082(a1),a1=a2-a1,a1=cbb0000008083(a1),a1=shuz[start++],a2=cbb0000008084(constantPool[a1])):(a1=cbb0000006208(a5),a2=cbb0000006209(a6),a1=a2*a1,a7=cbb0000006210(a1),a1=cbb0000006211(a3),a2=cbb0000006212(a2),a1=a1<<a2,a1=cbb0000006213(a1)):s_cbb<oo3746?(a1=cbb0000005609(a6),a2=cbb0000005610(a1),a1=a1>>>a2,a2=cbb0000005611(a1),a5=cbb0000005612(a3),a4=cbb0000005613(!a5)):(a1=cbb0000006741(a3),a2=cbb0000006742(a4),a1=a1%a2,a5=cbb0000006743(a1),a1=cbb0000006744(a2),a2=cbb0000006745(a3),a1=a1&a2,a3=cbb0000006746(a1)):s_cbb<oo3747?s_cbb<oo3748?(a2=cbb0000004049([]),a2=cbb0000004050(allthis)):(a1=cbb0000001324(a2),a2=cbb0000001325(a1),a1=a2===a1,a3=cbb0000001326(a1),function(){debugger;}()):(function(){debugger;}(),a1=cbb0000001297(a4),a2=cbb0000001298(a1),a1=a1>=a2,a2=cbb0000001299(a1)):s_cbb<oo3749?s_cbb<oo3750?s_cbb<oo3751?(a5=cbb0000002412(a3),a4=cbb0000002413(!a5),a1=cbb0000002414(a2),a2=cbb0000002415(a1),a1=a2==a1,a4=cbb0000002416(a1)):(a1=cbb0000004482(a6),a2=cbb0000004483(a1),a1=a1>>>a2,a2=cbb0000004484(a1),a5=cbb0000004485(a3),a1=cbb0000004486(~a5)):s_cbb<oo3752?(a1=cbb0000007810(a4),a2=cbb0000007811(a5),a1=a1-a2,a6=cbb0000007812(a1),a1=cbb0000007813(a2),a2=cbb0000007814(a1),a1=a2===a1,a3=cbb0000007815(a1)):(a1=cbb0000004529(a5),a2=cbb0000004530(a6),a1=a2!==a1,a1=cbb0000004531(a1),a5=cbb0000004532(a3),a1=cbb0000004533(~a5)):s_cbb<oo3753?s_cbb<oo3754?(a1=cbb0000001348(a2),a2=cbb0000001349(a1),a1=a2-a1,a1=cbb0000001350(a1),function(){debugger;}()):(a1=cbb00000054(a2),a2=cbb00000055(a1),a1=a1/a2,a3=cbb00000056(a1),a1=cbb00000057(a3),a2=cbb00000058(a1),a1=a1 in a2,a1=cbb00000059(a1)):(a5=cbb0000004514(a3),a1=cbb0000004515(~a5),a1=cbb0000004516(a5),a2=cbb0000004517(a9),a1=a2<a1,a6=cbb0000004518(a1));}else 
if(s_cbb<oo3755){s_cbb<oo3756?s_cbb<oo3757?s_cbb<oo3758?(a1=cbb0000008248(a2),a2=cbb0000008249(a1),a1=a2-a1,a1=cbb0000008250(a1),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008251(a2);cbbb[a9]=argsList[a2];}}()):(a1=cbb0000002925(a3),a2=cbb0000002926(a1),a3=delete a2[a1],a1=cbb0000002927(a3),a1=cbb0000002928(a4),a2=cbb0000002929(a1),a1=a1>=a2,a2=cbb0000002930(a1)):s_cbb<oo3760?(a2=cbb0000001903([]),a1=cbb0000001904(a2),a2=cbb0000001905(a1),a1=a2-a1,a1=cbb0000001906(a1)):(a1=cbb0000007595(a2),a2=cbb0000007596(a1),a1=a1+a2,a1=cbb0000007597(a1),a2=cbb0000007598([])):s_cbb<oo3761?s_cbb<oo3762?(a1=cbb0000001457(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a1=cbb0000001458(a3),a2=cbb0000001459(a4),a1=a1>>a2,a5=cbb0000001460(a1)):(a1=cbb0000001921(a3),a2=cbb0000001922(a3),a1=a1>a2,a2=cbb0000001923(a1),a2=cbb0000001924([])):(a1=cbb0000003045(a2),a2=cbb0000003046(a1),a1=a1+a2,a1=cbb0000003047(a1),a1=cbb0000003048(a5),a2=cbb0000003049(a6),a1=a2!==a1,a1=cbb0000003050(a1));}else if(s_cbb<oo3764){s_cbb<oo3765?s_cbb<oo3766?(a5=cbb0000006523(a1),a2=cbb0000006524(void a5),a1=cbb0000006525(a3),a2=cbb0000006526(a4),a1=a1^a2,a5=cbb0000006527(a1)):(a1=cbb0000001469(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a1=cbb0000001470(a6),a2=cbb0000001471(a1),a1=a1>>>a2,a2=cbb0000001472(a1)):s_cbb<oo3768?(a1=cbb0000007014(a2),a2=cbb0000007015(a1),a1=a1+a2,a1=cbb0000007016(a1),a1=cbb0000007017(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828):(a5=cbb0000005910(a3),a4=cbb0000005911(!a5),a1=cbb0000005912(a3),a2=cbb0000005913(a4),a1=a1>>a2,a5=cbb0000005914(a1));}else 
if(s_cbb<oo3770){if(s_cbb<oo3771){a1=cbb0000003454(a5);a2=cbb0000003455(a6);a1=a2!==a1;a1=cbb0000003456(a1);a1=cbb0000003457(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003458(a1)):a9=oo4829;;}else{a1=cbb0000007984(a2);a2=cbb0000007985(a1);a1=a2===a1;a3=cbb0000007986(a1);a1=shuz[start++];a2=cbb0000007987(a1);}}else{a1=shuz[start++];a2=cbb0000008045(constantPool[a1]);a1=cbb0000008046(a2);a2=cbb0000008047(a1);a1=a1/a2;a3=cbb0000008048(a1);}}else if(s_cbb<oo3773){s_cbb<oo3774?s_cbb<oo3775?s_cbb<oo3776?s_cbb<oo3777?(a1=cbb0000003272(a2),a2=cbb0000003273(a1),a1=a2===a1,a3=cbb0000003274(a1),a1=cbb0000003275(a6),a2=cbb0000003276(a1),a1=a1>>>a2,a2=cbb0000003277(a1)):(a1=cbb0000004433(a5),a2=cbb0000004434(a6),a1=a2*a1,a7=cbb0000004435(a1),a1=cbb0000004436(a2),a2=cbb0000004437(a3),a3=cbb0000004438(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004439(a3)):s_cbb<oo3778?(a1=cbb0000006137(a3),a2=cbb0000006138(a2),a1=a1<<a2,a1=cbb0000006139(a1),a1=cbb0000006140(a2),a2=cbb0000006141(a1),a1=a2===a1,a3=cbb0000006142(a1)):(a1=cbb0000003667({}),a5=cbb0000003668(a2),a2=cbb0000003669(-a5)):s_cbb<oo3779?s_cbb<oo3780?(a1=cbb0000005034(a2),a2=cbb0000005035(a1),a1=a2===a1,a3=cbb0000005036(a1),a1=cbb0000005037(a3),a2=cbb0000005038(a3),a1=a1>a2,a2=cbb0000005039(a1)):(a1=cbb0000004350(a2),a2=cbb0000004351(a1),a1=a1/a2,a3=cbb0000004352(a1),a1=cbb0000004353(a2),a2=cbb0000004354(a3),a3=cbb0000004355(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004356(a3)):(function(){a1=shuz[start++];a3=cbb0000001821(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001822(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001823(a4);}(),a1=cbb0000001824(a2),a2=cbb0000001825(a1),a1=a2-a1,a1=cbb0000001826(a1)):s_c
bb<oo3784?s_cbb<oo3785?s_cbb<oo3786?(a1=cbb000000312(a6),a2=cbb000000313(a1),a1=a1>>>a2,a2=cbb000000314(a1),a1=cbb000000315(),a2=cbb000000316(),a1=a2!=a1,cbb000000317(a1)):(a1=cbb0000002741(a5),a2=cbb0000002742(a6),a1=a2*a1,a7=cbb0000002743(a1),a5=cbb0000002744(a1),a2=cbb0000002745(void a5)):s_cbb<oo3787?(a1=cbb000000101(a3),a2=cbb000000102(a1),a1=a1 in a2,a1=cbb000000103(a1),a1=cbb000000104(),a2=cbb000000105(),a1=a2!=a1,cbb000000106(a1)):(function(){a1=cbb0000002506(a2);throw a1;}(),a1=cbb0000002507(a2),a2=cbb0000002508(a1),a1=a2-a1,a1=cbb0000002509(a1)):s_cbb<oo3788?s_cbb<oo3789?(function(){i=[];a2=cbb0000002589(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002590(a2),a2=cbb0000002591(a1),a1=a2===a1,a3=cbb0000002592(a1)):(a2=cbb0000007410(allthis),all=cbbb,a1=cbb0000007411(cbbb)):(a1=cbb0000003288(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a1=cbb0000003289(a2),a2=cbb0000003290(a1),a1=a1+a2,a1=cbb0000003291(a1));}else if(s_cbb<oo3791){s_cbb<oo3792?s_cbb<oo3793?s_cbb<oo3794?(a1=cbb0000006852(a3),a2=cbb0000006853(a1),a1=a1<a2,a1=cbb0000006854(a1),a1=cbb0000006855(a5),a2=cbb0000006856(a6),a1=a2*a1,a7=cbb0000006857(a1)):(a1=cbb0000006967(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a1=cbb0000006968(a2),a2=cbb0000006969(a1),a1=a2==a1,a4=cbb0000006970(a1)):s_cbb<oo3796?(a5=cbb0000001804(a1),a2=cbb0000001805(void a5),function(){a1=shuz[start++];a3=cbb0000001806(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001807(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001808(a4);}()):(a1=shuz[start++],a2=shuz[start++],a1=new 
RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008119(a1),a1=cbb0000008120(a4),a2=cbb0000008121(a1),a1=a1>=a2,a2=cbb0000008122(a1)):s_cbb<oo3800?s_cbb<oo3801?(function(){debugger;}(),a1=cbb0000007215(a3),a2=cbb0000007216(a1),a1=a1<a2,a1=cbb0000007217(a1)):(a1=cbb0000002885(a2),a2=cbb0000002886(a1),a1=a1+a2,a1=cbb0000002887(a1),a1=cbb0000002888(a3),a2=cbb0000002889(a1),a3=delete a2[a1],a1=cbb0000002890(a3)):(a1=shuz[start++],a2=cbb0000007543(a3),a3=cbb0000007544(a1),a2[constantPool[a1]]=a3,a1=cbb0000007545(a6),a2=cbb0000007546(a1),a1=a1>>>a2,a2=cbb0000007547(a1));}else if(s_cbb<oo3802){if(s_cbb<oo3803){s_cbb<oo3804?(a1=cbb0000003260(a2),a2=cbb0000003261(a1),a1=a2===a1,a3=cbb0000003262(a1),a1=cbb0000003263(a2),a2=cbb0000003264(a3),a1=a1&a2,a3=cbb0000003265(a1)):(a1=cbb0000005450(a4),a2=cbb0000005451(a5),a1=a1-a2,a6=cbb0000005452(a1),a1=cbb0000005453(a2),a2=cbb0000005454(a3),a1=a1&a2,a3=cbb0000005455(a1));}else if(s_cbb<oo3805){a1=cbb0000007077(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a1=cbb0000007078(a2);a2=cbb0000007079(a1);a1=a2-a1;a1=cbb0000007080(a1);}else{a1=cbb0000004950();a2=cbb0000004951();a1=a2!=a1;cbb0000004952(a1);a1=cbb0000004953(a3);a2=cbb0000004954(a3);a1=a1>a2;a2=cbb0000004955(a1);}}else s_cbb<oo3807?s_cbb<oo3808?(a1=cbb0000002080({}),a1=cbb0000002081(a2),a2=cbb0000002082(a1),a1=a2==a1,a4=cbb0000002083(a1)):(a1=cbb0000005731(a6),a2=cbb0000005732(a1),a1=a1>>>a2,a2=cbb0000005733(a1),a1=cbb0000005734(a4),a2=cbb0000005735(a1),a1=a1>=a2,a2=cbb0000005736(a1)):(a5=cbb0000002753(a1),a2=cbb0000002754(void a5),a1=cbb0000002755(a6),a2=cbb0000002756(a7),a1=a1|a2,a8=cbb0000002757(a1));}else if(s_cbb<oo3809){if(s_cbb<oo3810){if(s_cbb<oo3811){s_cbb<oo3812?s_cbb<oo3813?s_cbb<oo3814?(a1=cbb0000002346(a3),a2=cbb0000002347(a4),a1=a1>>a2,a5=cbb0000002348(a1),a5=cbb0000002349(a3),a4=cbb0000002350(!a5)):(a1=cbb0000002613({}),function(){i=[];a2=cbb0000002614(a2);for(a1 in 
a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}()):s_cbb<oo3815?(a1=cbb000000463(a3),a2=cbb000000464(a4),a1=a1^a2,a5=cbb000000465(a1),a1=cbb000000466(a2),a2=cbb000000467(a1),a1=a2-a1,a1=cbb000000468(a1)):(a1=cbb0000004962(a2),a2=cbb0000004963(a1),a1=a2===a1,a3=cbb0000004964(a1),a1=cbb0000004965(a3),a2=cbb0000004966(a3),a1=a1>a2,a2=cbb0000004967(a1)):s_cbb<oo3816?s_cbb<oo3817?(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008237(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008238(a3),a2=cbb0000008239(a4),a1=a1^a2,a5=cbb0000008240(a1)):(a1=cbb0000002842(cbbb),a2=cbb0000002843(a1),a1=a1<=a2,a5=cbb0000002844(a1),a1=cbb0000002845(a3),a2=cbb0000002846(a1),a3=delete a2[a1],a1=cbb0000002847(a3)):(a1=shuz[start++],a2=cbb0000007912(a1),function(){debugger;}());}else if(s_cbb<oo3819){if(s_cbb<oo3820){s_cbb<oo3821?(a2=cbb0000001521(allthis),a1=shuz[start++],start+=a1):(a1=cbb0000002516(a2),a2=cbb0000002517(a1),a1=a2===a1,a3=cbb0000002518(a1),function(){a1=cbb0000002519(a2);throw a1;}());}else if(s_cbb<oo3822){a2=cbb0000008101(allthis);a1=shuz[start++];a2=cbb0000008102(constantPool[a1]);}else{a1=cbb0000001021(a3);a2=cbb0000001022(a1);a1=a1<a2;a1=cbb0000001023(a1);return;}}else s_cbb<oo3823?s_cbb<oo3824?(a1=cbb0000006366(a3),a2=cbb0000006367(a4),a1=a1%a2,a5=cbb0000006368(a1),a1=cbb0000006369(a2),a2=cbb0000006370(a1),a1=a1/a2,a3=cbb0000006371(a1)):(a1=cbb000000516(a5),a2=cbb000000517(a6),a1=a2!==a1,a1=cbb000000518(a1),a1=cbb000000519(a5),a2=cbb000000520(a3),a3=cbb000000521(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(a5=cbb0000003759(a3),a1=cbb0000003760(typeof a5),a5=cbb0000003761(a1),a2=cbb0000003762(void a5));}else if(s_cbb<oo3825){if(s_cbb<oo3826){s_cbb<oo3827?s_cbb<oo3828?(j=cbb0000001073(a1),j2=cbb0000001074(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a5=cbb0000001075(a1),a2=cbb0000001076(void 
a5)):(function(){debugger;}(),a1=cbb0000003342(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):s_cbb<oo3830?(all=cbbb,a1=cbb0000007336(cbbb),a1=cbb0000007337({})):(a1=cbb0000006406(a2),a2=cbb0000006407(a1),a1=a1/a2,a3=cbb0000006408(a1),a1=cbb0000006409(a5),a2=cbb0000006410(a9),a1=a2<a1,a6=cbb0000006411(a1));}else if(s_cbb<oo3831){if(s_cbb<oo3832){a1=cbb0000008431(a5);a2=cbb0000008432(a6);a1=a2!==a1;a1=cbb0000008433(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008434(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}else{a1=cbb0000002202(a3);a2=cbb0000002203(a4);a3=cbb0000002204(a5);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a1=cbb0000002205(a3);a2=cbb0000002206(a1);a1=a1<a2;a1=cbb0000002207(a1);}}else{a1=cbb0000007693();a2=cbb0000007694();a1=a2!=a1;cbb0000007695(a1);a1=cbb0000007696(a2);a2=cbb0000007697(a1);a1=a1+a2;a1=cbb0000007698(a1);}}else s_cbb<oo3844?s_cbb<oo3845?s_cbb<oo3846?(a1=cbb00000095(a2),a2=cbb00000096(a3),a1=a1&a2,a3=cbb00000097(a1),a1=cbb00000098(a3),a2=cbb00000099(a1),a1=a1 in 
a2,a1=cbb000000100(a1)):(function(){a1=cbb0000001629(a2);a2=cbb0000001630(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001631(a1);}(),a1=cbb0000001632(a5),a2=cbb0000001633(a9),a1=a2<a1,a6=cbb0000001634(a1)):s_cbb<oo3847?(a5=cbb0000001546(a3),a4=cbb0000001547(!a5),function(){a1=cbb0000001548(a2);a2=cbb0000001549(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001550(a1);}()):(a1=cbb000000146(a3),a2=cbb000000147(a1),a1=a1 in a2,a1=cbb000000148(a1),a1=cbb000000149(a2),a2=cbb000000150(a3),a1=a1&a2,a3=cbb000000151(a1)):s_cbb<oo3848?s_cbb<oo3849?(a1=cbb0000004572(a2),a2=cbb0000004573(a1),a1=a2==a1,a4=cbb0000004574(a1),a5=cbb0000004575(a3),a1=cbb0000004576(~a5)):(a1=cbb0000005103(a3),a2=cbb0000005104(a3),a1=a1>a2,a2=cbb0000005105(a1),a1=cbb0000005106(a4),a2=cbb0000005107(a1),a1=a1>=a2,a2=cbb0000005108(a1)):(a1=shuz[start++],a2=cbb0000007976(a1),a1=cbb0000007977(a4),a2=cbb0000007978(a5),a1=a1-a2,a6=cbb0000007979(a1));}else if(s_cbb<oo3850){if(s_cbb<oo3851){if(s_cbb<oo3852){if(s_cbb<oo3853){if(s_cbb<oo3854){a1=cbb0000003464(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003465(a1)):a9=oo4829;;a1=cbb0000003466(a5);a2=cbb0000003467(a6);a1=a2*a1;a7=cbb0000003468(a1);}else{a1=cbb0000005085(a2);a2=cbb0000005086(a1);a1=a1/a2;a3=cbb0000005087(a1);a1=cbb0000005088(a3);a2=cbb0000005089(a3);a1=a1>a2;a2=cbb0000005090(a1);}}else s_cbb<oo3856?(a1=cbb0000005991(a4),a2=cbb0000005992(a5),a1=a1-a2,a6=cbb0000005993(a1),a1=cbb0000005994(a6),a2=cbb0000005995(a7),a1=a1|a2,a8=cbb0000005996(a1)):(a5=cbb0000003670(a3),a1=cbb0000003671(typeof a5),a1=cbb0000003672(cbbb),a2=cbb0000003673(a1),a1=a1<=a2,a5=cbb0000003674(a1));}else 
s_cbb<oo3857?s_cbb<oo3858?(function(){debugger;}(),a1=cbb0000001330(a3),a2=cbb0000001331(a1),a1=a1<a2,a1=cbb0000001332(a1)):(a1=cbb0000007929(a2),a2=cbb0000007930(a3),a1=a1&a2,a3=cbb0000007931(a1),a1=shuz[start++],a2=cbb0000007932(a1)):(function(){a1=cbb0000001623(a2);a2=cbb0000001624(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001625(a1);}(),a1=cbb0000001626(a2),a2=cbb0000001627(a1),a1=a2==a1,a4=cbb0000001628(a1));}else s_cbb<oo3859?s_cbb<oo3860?s_cbb<oo3861?(a1=cbb0000002768(a3),a2=cbb0000002769(a2),a1=a1<<a2,a1=cbb0000002770(a1),a5=cbb0000002771(a1),a2=cbb0000002772(void a5)):(function(){debugger;}(),a5=cbb0000002304(a3),a4=cbb0000002305(!a5)):s_cbb<oo3862?(a1=cbb0000001530(a5),a2=cbb0000001531(a6),a1=a2*a1,a7=cbb0000001532(a1),a1=shuz[start++],start+=a1):(a1=cbb0000005159(a4),a2=cbb0000005160(a1),a1=a1>=a2,a2=cbb0000005161(a1),a1=cbb0000005162(a2),a2=cbb0000005163(a1),a1=a1/a2,a3=cbb0000005164(a1)):s_cbb<oo3863?s_cbb<oo3864?(a5=cbb0000002376(a3),a4=cbb0000002377(!a5),a1=cbb0000002378(a3),a2=cbb0000002379(a2),a1=a1<<a2,a1=cbb0000002380(a1)):(a1=cbb0000005073(a3),a2=cbb0000005074(a4),a1=a1%a2,a5=cbb0000005075(a1),a1=cbb0000005076(a3),a2=cbb0000005077(a3),a1=a1>a2,a2=cbb0000005078(a1)):(a1=cbb0000006677(a3),a2=cbb0000006678(a4),a1=a1%a2,a5=cbb0000006679(a1),function(){debugger;}());}else if(s_cbb<oo3865){if(s_cbb<oo3866){if(s_cbb<oo3867){if(s_cbb<oo3868){a1=cbb0000001264(a3);a2=cbb0000001265(a4);a1=a1>>a2;a5=cbb0000001266(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return 
a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}else{a1=cbb000000819(a5);a2=cbb000000820(a6);a1=a2*a1;a7=cbb000000821(a1);a1=cbb000000822(a5);a2=cbb000000823(a9);a1=a2<a1;a6=cbb000000824(a1);}}else s_cbb<oo3875?(a5=cbb0000001425(a1),a2=cbb0000001426(void a5),a1=cbb0000001427(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820):(a1=cbb0000005898(a3),a2=cbb0000005899(a4),a1=a1>>a2,a5=cbb0000005900(a1),a1=cbb0000005901(a3),a2=cbb0000005902(a2),a1=a1<<a2,a1=cbb0000005903(a1));}else if(s_cbb<oo3877){if(s_cbb<oo3878){a1=cbb0000004635(a3);a2=cbb0000004636(a1);a2.push(a1);a1=cbb0000004637(a2);a1=cbb0000004638(cbbb);a2=cbb0000004639(a1);a1=a1<=a2;a5=cbb0000004640(a1);}else{a1=cbb0000007063(a2);a2=cbb0000007064(a1);a1=a1+a2;a1=cbb0000007065(a1);a1=cbb0000007066(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}}else{a1=cbb000000719(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000720(a1)):a9=oo4829;;(function(){debugger;})();}}else if(s_cbb<oo3881){if(s_cbb<oo3882){if(s_cbb<oo3883){a1=cbb0000008353(a3);a2=cbb0000008354(a3);a1=a1>a2;a2=cbb0000008355(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008356(a1);let g=a7;all[g]=function(){let g2=new 
cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}else{a1=cbb0000001522(a2);a2=cbb0000001523(a1);a1=a2===a1;a3=cbb0000001524(a1);a1=shuz[start++];start+=a1;}}else{a1=cbb0000003484(a2);a2=cbb0000003485(a3);a1=a1&a2;a3=cbb0000003486(a1);a1=cbb0000003487(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003488(a1)):a9=oo4829;;}}else s_cbb<oo3896?s_cbb<oo3897?(a1=cbb0000004926(a3),a2=cbb0000004927(a1),a1=a1<a2,a1=cbb0000004928(a1),a1=cbb0000004929(a3),a2=cbb0000004930(a3),a1=a1>a2,a2=cbb0000004931(a1)):(a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008163(a1),a1=cbb0000008164(a6),a2=cbb0000008165(a1),a1=a1>>>a2,a2=cbb0000008166(a1)):(a1=cbb0000003347(a2),a2=cbb0000003348(a1),a1=a2===a1,a3=cbb0000003349(a1),a1=cbb0000003350(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828);}else if(s_cbb<oo3899){if(s_cbb<oo3900){if(s_cbb<oo3901){if(s_cbb<oo3902){if(s_cbb<oo3903){if(s_cbb<oo3904){s_cbb<oo3905?(function(){a1=cbb0000002528(a2);throw a1;}(),a1=cbb0000002529(),a2=cbb0000002530(),a1=a2!=a1,cbb0000002531(a1)):(function(){debugger;}(),a1=cbb0000001338(a3),a2=cbb0000001339(a4),a1=a1^a2,a5=cbb0000001340(a1));}else 
if(s_cbb<oo3906){a1=cbb0000004332(a2);a2=cbb0000004333(a3);a3=cbb0000004334(a4);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a1=cbb0000004335(a3);a1=cbb0000004336(a4);a2=cbb0000004337(a5);a1=a1-a2;a6=cbb0000004338(a1);}else{a1=cbb000000779(a5);a2=cbb000000780(a6);a1=a2!==a1;a1=cbb000000781(a1);a1=cbb000000782(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000783(a1)):a9=oo4829;;}}else s_cbb<oo3908?s_cbb<oo3909?(a1=cbb0000002524(a3),a2=cbb0000002525(a4),a1=a1%a2,a5=cbb0000002526(a1),function(){a1=cbb0000002527(a2);throw a1;}()):(a1=cbb0000007269(a3),a2=cbb0000007270(a1),a1=a1<a2,a1=cbb0000007271(a1),a1=cbb0000007272(a2),a2=cbb0000007273(a1),a1=a2===a1,a3=cbb0000007274(a1)):(a1=cbb0000002642(a5),a2=cbb0000002643(a6),a1=a2!==a1,a1=cbb0000002644(a1),a5=cbb0000002645(a1),a2=cbb0000002646(void a5));}else if(s_cbb<oo3910){if(s_cbb<oo3911){s_cbb<oo3912?(a1=cbb0000003351(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a1=cbb0000003352(a2),a2=cbb0000003353(a1),a1=a1+a2,a1=cbb0000003354(a1)):(j=cbb0000001167(a1),j2=cbb0000001168(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a1=cbb0000001169(a3),a2=cbb0000001170(a1),a1=a1<a2,a1=cbb0000001171(a1));}else if(s_cbb<oo3914){(function(){a1=cbb0000002427(a2);throw a1;})();a1=cbb0000002428(a2);a2=cbb0000002429(a3);a1=a1&a2;a3=cbb0000002430(a1);}else{a1=cbb0000001045(a5);a2=cbb0000001046(a9);a1=a2<a1;a6=cbb0000001047(a1);return;}}else s_cbb<oo3915?s_cbb<oo3916?(a1=cbb0000002208(a3),a2=cbb0000002209(a4),a3=cbb0000002210(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a5=cbb0000002211(a1),a2=cbb0000002212(void a5)):(a1=cbb0000002609(a2),a2=cbb0000002610(a3),a1=a1&a2,a3=cbb0000002611(a1),function(){i=[];a2=cbb0000002612(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}()):(a1=cbb0000007618(a2),a2=cbb0000007619(a1),a1=a2-a1,a1=cbb0000007620(a1),a1=cbb0000007621(a2),a2=cbb0000007622(a1),a1=a1+a2,a1=cbb0000007623(a1));}else 
if(s_cbb<oo3917){if(s_cbb<oo3918){if(s_cbb<oo3919){if(s_cbb<oo3920){a1=cbb0000001036(a5);a2=cbb0000001037(a6);a1=a2*a1;a7=cbb0000001038(a1);return;}else{a1=cbb0000001432(a2);a3=shuz[start++];a1?start+=a3:a9=oo4820;a1=cbb0000001433(a4);a2=cbb0000001434(a1);a1=a1>=a2;a2=cbb0000001435(a1);}}else s_cbb<oo3922?(function(){i=[];a2=cbb0000002629(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002630(a3),a2=cbb0000002631(a3),a1=a1>a2,a2=cbb0000002632(a1)):(a1=cbb0000004557(a2),a2=cbb0000004558(a1),a1=a2-a1,a1=cbb0000004559(a1),a5=cbb0000004560(a3),a1=cbb0000004561(~a5));}else s_cbb<oo3923?s_cbb<oo3924?(a1=cbb0000007603(a2),a2=cbb0000007604(a1),a1=a1+a2,a1=cbb0000007605(a1),a1=cbb0000007606(a2),a2=cbb0000007607(a1),a1=a1+a2,a1=cbb0000007608(a1)):(a1=cbb000000898(a5),a2=cbb000000899(a9),a1=a2<a1,a6=cbb000000900(a1),a1=cbb000000901(a2),a2=cbb000000902(a1),a1=a1/a2,a3=cbb000000903(a1)):(a1=cbb0000008131(a5),a2=cbb0000008132(a6),a1=a2*a1,a7=cbb0000008133(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008134(a1));}else s_cbb<oo3925?s_cbb<oo3926?s_cbb<oo3927?(a1=cbb000000300(),a2=cbb000000301(),a1=a2!=a1,cbb000000302(a1),a1=cbb000000303(a3),a2=cbb000000304(a4),a1=a1%a2,a5=cbb000000305(a1)):(a1=cbb0000002563(a5),a2=cbb0000002564(a9),a1=a2<a1,a6=cbb0000002565(a1),function(){i=[];a2=cbb0000002566(a2);for(a1 in 
a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}()):s_cbb<oo3928?(a1=cbb0000003102(a5),a2=cbb0000003103(a6),a1=a2!==a1,a1=cbb0000003104(a1),a1=cbb0000003105(a3),a2=cbb0000003106(a4),a1=a1^a2,a5=cbb0000003107(a1)):(a2=cbb0000006061([]),a1=cbb0000006062(a6),a2=cbb0000006063(a7),a1=a1|a2,a8=cbb0000006064(a1)):s_cbb<oo3929?s_cbb<oo3930?(a1=shuz[start++],a2=cbb0000008061(constantPool[a1]),a1=cbb0000008062(a2),a2=cbb0000008063(a1),a1=a2-a1,a1=cbb0000008064(a1)):(a1=cbb0000006372(a2),a2=cbb0000006373(a1),a1=a1/a2,a3=cbb0000006374(a1),a1=cbb0000006375(a6),a2=cbb0000006376(a1),a1=a1>>>a2,a2=cbb0000006377(a1)):(a1=cbb0000001786(a6),a2=cbb0000001787(a7),a1=a1|a2,a8=cbb0000001788(a1),function(){a1=shuz[start++];a3=cbb0000001789(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001790(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001791(a4);}());}else if(s_cbb<oo3934){s_cbb<oo3935?s_cbb<oo3936?s_cbb<oo3937?s_cbb<oo3938?(a1=cbb0000003853(a2),a2=cbb0000003854(a1),a1=a2==a1,a4=cbb0000003855(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003856()):a4.splice(oo4806,oo4806,cbb0000003857());}a1=cbb0000003858(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else 
if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):(a1=cbb000000549(a5),a2=cbb000000550(a3),a3=cbb000000551(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb000000552(a3),a2=cbb000000553(a2),a1=a1<<a2,a1=cbb000000554(a1)):s_cbb<oo3948?(a1=cbb0000007589(a2),a2=cbb0000007590(a1),a1=a2-a1,a1=cbb0000007591(a1),a1=cbb0000007592(a2),a2=cbb0000007593(a1),a1=a1+a2,a1=cbb0000007594(a1)):(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003895()):a4.splice(oo4806,oo4806,cbb0000003896());}a1=cbb0000003897(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a1=cbb0000003898(a2),a2=cbb0000003899(a1),a1=a2-a1,a1=cbb0000003900(a1)):s_cbb<oo3958?s_cbb<oo3959?(a1=cbb0000006920(a5),a2=cbb0000006921(a6),a1=a2*a1,a7=cbb0000006922(a1),a1=cbb0000006923(a4),a2=cbb0000006924(a1),a1=a1>=a2,a2=cbb0000006925(a1)):(a1=cbb0000006753(a3),a2=cbb0000006754(a4),a1=a1^a2,a5=cbb0000006755(a1),a1=cbb0000006756(a3),a2=cbb0000006757(a4),a1=a1%a2,a5=cbb0000006758(a1)):(a1=cbb0000006055(a6),a2=cbb0000006056(a7),a1=a1|a2,a8=cbb0000006057(a1),a1=cbb0000006058(a3),a2=cbb0000006059(a3),a1=a1>a2,a2=cbb0000006060(a1)):s_cbb<oo3960?s_cbb<oo3961?s_cbb<oo3962?(a1=cbb0000005379(a5),a2=cbb0000005380(a9),a1=a2<a1,a6=cbb0000005381(a1),a1=cbb0000005382(cbbb),a2=cbb0000005383(a1),a1=a1<=a2,a5=cbb0000005384(a1)):(a1=cbb0000007568(a6),a2=cbb0000007569(a1),a1=a1>>>a2,a2=cbb0000007570(a1),a1=cbb0000007571(a2),a2=cbb0000007572(a1),a1=a1+a2,a1=cbb0000007573(a1)):s_cbb<oo3963?(a1=cbb0000004534(a5),a2=cbb0000004535(a6),a1=a2!==a1,a1=cbb0000004536(a1),a5=cbb0000004537(a3),a1=cbb0000004538(~a5)):(a1=cbb0000005858(a3),a2=cbb0000005859(a4),a1=a1>>a2,a5=cbb0000005860(a1),a1=cbb0000005861(a3),a2=cbb0000005862(a1),a1=a1<a2,a1=cbb0000005863(a1)):s_cbb<oo3964?s_cbb<oo3965?(a1=cbb000000489(a2),a2=cbb000000490(a1),a1=a2-a1,a1=cbb000000491(a1),a1=cbb000000492(a3),
a2=cbb000000493(a2),a1=a1<<a2,a1=cbb000000494(a1)):(a1=cbb0000005830(a4),a2=cbb0000005831(a1),a1=a1>=a2,a2=cbb0000005832(a1),a1=cbb0000005833(a3),a2=cbb0000005834(a4),a1=a1>>a2,a5=cbb0000005835(a1)):(function(){debugger;}(),a1=cbb0000002011({}));}else if(s_cbb<oo3966){if(s_cbb<oo3967){if(s_cbb<oo3968){s_cbb<oo3969?(a1=cbb0000005130(a4),a2=cbb0000005131(a1),a1=a1>=a2,a2=cbb0000005132(a1),a1=cbb0000005133(a3),a2=cbb0000005134(a4),a1=a1>>a2,a5=cbb0000005135(a1)):(a1=cbb0000002128(a3),a2=cbb0000002129(a4),a3=cbb0000002130(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002131(a3),a2=cbb0000002132(a4),a1=a1^a2,a5=cbb0000002133(a1));}else if(s_cbb<oo3970){a1=cbb000000201(a2);a2=cbb000000202(a1);a1=a1/a2;a3=cbb000000203(a1);a1=cbb000000204();a2=cbb000000205();a1=a2!=a1;cbb000000206(a1);}else{a1=cbb0000001285(a2);a2=cbb0000001286(a1);a1=a1+a2;a1=cbb0000001287(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else 
if(s_cbb<oo3977){s_cbb<oo3978?(a1=cbb0000005964(a2),a2=cbb0000005965(a3),a1=a1&a2,a3=cbb0000005966(a1),a1=cbb0000005967(a6),a2=cbb0000005968(a7),a1=a1|a2,a8=cbb0000005969(a1)):(a1=cbb0000004871(a2),a2=cbb0000004872(a1),a1=a2==a1,a4=cbb0000004873(a1),a1=cbb0000004874(a2),a2=cbb0000004875(a1),a1=a2==a1,a4=cbb0000004876(a1));}else{a1=cbb0000001018();a2=cbb0000001019();a1=a2!=a1;cbb0000001020(a1);return;}}else s_cbb<oo3979?s_cbb<oo3980?s_cbb<oo3981?(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008302(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008303(a2),a2=cbb0000008304(a1),a1=a2===a1,a3=cbb0000008305(a1)):(a1=cbb0000006834(a3),a2=cbb0000006835(a1),a1=a1<a2,a1=cbb0000006836(a1),a1=cbb0000006837(a5),a2=cbb0000006838(a6),a1=a2*a1,a7=cbb0000006839(a1)):(a1=cbb0000002736(a2),a2=cbb0000002737(a1),a1=a1+a2,a1=cbb0000002738(a1),a5=cbb0000002739(a1),a2=cbb0000002740(void a5)):s_cbb<oo3983?s_cbb<oo3984?(a1=cbb0000005904(a2),a2=cbb0000005905(a1),a1=a1/a2,a3=cbb0000005906(a1),a1=cbb0000005907(a3),a2=cbb0000005908(a4),a1=a1>>a2,a5=cbb0000005909(a1)):(a1=cbb0000004701(a3),a2=cbb0000004702(a1),a2.push(a1),a1=cbb0000004703(a2),a1=cbb0000004704(a4),a2=cbb0000004705(a1),a1=a1>=a2,a2=cbb0000004706(a1)):(a5=cbb0000003708(a3),a1=cbb0000003709(typeof a5),a1=cbb0000003710(a4),a2=cbb0000003711(a5),a1=a1-a2,a6=cbb0000003712(a1));}else 
if(s_cbb<oo3985){if(s_cbb<oo3986){if(s_cbb<oo3987){s_cbb<oo3988?s_cbb<oo3989?s_cbb<oo3990?(a1=shuz[start++],a2=cbb0000007470(a3),a3=cbb0000007471(a1),a2[constantPool[a1]]=a3,a1=cbb0000007472(),a2=cbb0000007473(),a1=a2!=a1,cbb0000007474(a1)):(a1=cbb000000266(),a2=cbb000000267(),a1=a2!=a1,cbb000000268(a1),a1=cbb000000269(a4),a2=cbb000000270(a1),a1=a1>=a2,a2=cbb000000271(a1)):s_cbb<oo3991?(a5=cbb0000002410(a3),a4=cbb0000002411(!a5),function(){debugger;}()):(a5=cbb0000003530(a3),a4=cbb0000003531(!a5),a5=cbb0000003532(a2),a2=cbb0000003533(-a5)):s_cbb<oo3992?s_cbb<oo3993?(a1=cbb0000006319(a2),a2=cbb0000006320(a1),a1=a1/a2,a3=cbb0000006321(a1),a1=cbb0000006322(a4),a2=cbb0000006323(a1),a1=a1>=a2,a2=cbb0000006324(a1)):(a5=cbb0000003636(a2),a2=cbb0000003637(-a5),a1=cbb0000003638(a2),a2=cbb0000003639(a1),a1=a1+a2,a1=cbb0000003640(a1)):(a1=cbb0000001587(a3),a2=cbb0000001588(a4),a1=a1%a2,a5=cbb0000001589(a1),function(){a1=cbb0000001590(a2);a2=cbb0000001591(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001592(a1);}());}else if(s_cbb<oo3994){s_cbb<oo3995?s_cbb<oo3996?(j=cbb0000001129(a1),j2=cbb0000001130(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a1=cbb0000001131(a3),a2=cbb0000001132(a4),a1=a1>>a2,a5=cbb0000001133(a1)):(a1=cbb0000005605({}),a1=cbb0000005606(a6),a2=cbb0000005607(a1),a1=a1>>>a2,a2=cbb0000005608(a1)):s_cbb<oo3997?(a1=cbb0000006632(a2),a2=cbb0000006633(a1),a1=a1/a2,a3=cbb0000006634(a1),a1=cbb0000006635(a3),a2=cbb0000006636(a4),a1=a1%a2,a5=cbb0000006637(a1)):(a2=cbb0000004030(allthis),a1=cbb0000004031(cbbb),a2=cbb0000004032(a1),a1=a1<=a2,a5=cbb0000004033(a1));}else 
if(s_cbb<oo3998){if(s_cbb<oo3999){a1=cbb0000006412(a2);a2=cbb0000006413(a1);a1=a1/a2;a3=cbb0000006414(a1);a1=cbb0000006415(a3);a2=cbb0000006416(a4);a1=a1%a2;a5=cbb0000006417(a1);}else{a1=cbb0000003510(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003511(a1)):a9=oo4829;;a1=cbb0000003512(a6);a2=cbb0000003513(a7);a1=a1|a2;a8=cbb0000003514(a1);}}else{a2=cbb0000007870([]);a1=cbb0000007871(a4);a2=cbb0000007872(a5);a1=a1-a2;a6=cbb0000007873(a1);}}else if(s_cbb<oo4001){if(s_cbb<oo4002){if(s_cbb<oo4003){s_cbb<oo4004?(a1=cbb0000001716(a3),a2=cbb0000001717(a1),a1=a1<a2,a1=cbb0000001718(a1),function(){a1=shuz[start++];a3=cbb0000001719(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001720(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001721(a4);}()):(j=cbb0000001192(a1),j2=cbb0000001193(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a1=cbb0000001194(a2),a2=cbb0000001195(a1),a1=a1/a2,a3=cbb0000001196(a1));}else if(s_cbb<oo4008){a1=cbb0000005121(a3);a2=cbb0000005122(a4);a1=a1%a2;a5=cbb0000005123(a1);a1=cbb0000005124(a4);a2=cbb0000005125(a1);a1=a1>=a2;a2=cbb0000005126(a1);}else{a1=cbb000000789(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000790(a1)):a9=oo4829;;a1=cbb000000791(a2);a2=cbb000000792(a1);a1=a2===a1;a3=cbb000000793(a1);}}else if(s_cbb<oo4010){if(s_cbb<oo4011){a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return 
a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}a1=cbb0000001215(a2);a2=cbb0000001216(a3);a1=a1&a2;a3=cbb0000001217(a1);}else{a1=cbb0000001042(a3);a2=cbb0000001043(a1);a1=a1<a2;a1=cbb0000001044(a1);return;}}else{a2=cbb0000004685(allthis);a1=cbb0000004686(a3);a2=cbb0000004687(a1);a2.push(a1);a1=cbb0000004688(a2);}}else s_cbb<oo4018?s_cbb<oo4019?s_cbb<oo4020?(a1=cbb000000825(a5),a2=cbb000000826(a6),a1=a2*a1,a7=cbb000000827(a1),a1=cbb000000828(a5),a2=cbb000000829(a9),a1=a2<a1,a6=cbb000000830(a1)):(a1=cbb0000006570(a6),a2=cbb0000006571(a7),a1=a1|a2,a8=cbb0000006572(a1),a1=cbb0000006573(a3),a2=cbb0000006574(a4),a1=a1^a2,a5=cbb0000006575(a1)):s_cbb<oo4021?(a1=cbb0000004719(a3),a2=cbb0000004720(a1),a2.push(a1),a1=cbb0000004721(a2),a1=cbb0000004722(a2),a2=cbb0000004723(a3),a1=a1&a2,a3=cbb0000004724(a1)):(a5=cbb0000002710(a1),a2=cbb0000002711(void a5),a1=cbb0000002712(a5),a2=cbb0000002713(a9),a1=a2<a1,a6=cbb0000002714(a1)):s_cbb<oo4022?s_cbb<oo4023?(a1=cbb0000005079(a4),a2=cbb0000005080(a5),a1=a1-a2,a6=cbb0000005081(a1),a1=cbb0000005082(a3),a2=cbb0000005083(a3),a1=a1>a2,a2=cbb0000005084(a1)):(a5=cbb0000001363(a1),a2=cbb0000001364(void a5),function(){debugger;}()):(a2=cbb0000005482([]),a1=cbb0000005483(a2),a2=cbb0000005484(a3),a1=a1&a2,a3=cbb0000005485(a1));}else 
if(s_cbb<oo4024){if(s_cbb<oo4025){if(s_cbb<oo4026){if(s_cbb<oo4027){s_cbb<oo4028?(a1=cbb0000002140(a2),a2=cbb0000002141(a1),a1=a2==a1,a4=cbb0000002142(a1),a1=cbb0000002143(a3),a2=cbb0000002144(a4),a3=cbb0000002145(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(a1=cbb0000004837(a2),a2=cbb0000004838(a1),a1=a2==a1,a4=cbb0000004839(a1),a1=cbb0000004840(a2),a2=cbb0000004841(a1),a1=a1+a2,a1=cbb0000004842(a1));}else if(s_cbb<oo4029){a1=cbb0000001015(a3);a2=cbb0000001016(a4);a1=a1^a2;a5=cbb0000001017(a1);return;}else{a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008411(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008412(a2);a2=cbb0000008413(a1);a1=a2==a1;a4=cbb0000008414(a1);}}else 
s_cbb<oo4041?s_cbb<oo4042?(a1=cbb0000007281(a5),a2=cbb0000007282(a6),a1=a2!==a1,a1=cbb0000007283(a1),a1=cbb0000007284(a3),a2=cbb0000007285(a1),a1=a1<a2,a1=cbb0000007286(a1)):(a1=cbb0000008233(a4),a2=cbb0000008234(a5),a1=a1-a2,a6=cbb0000008235(a1),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008236(a2);cbbb[a9]=argsList[a2];}}()):(a1=cbb0000002164(a3),a2=cbb0000002165(a4),a3=cbb0000002166(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002167(a5),a2=cbb0000002168(a6),a1=a2!==a1,a1=cbb0000002169(a1));}else s_cbb<oo4044?s_cbb<oo4045?s_cbb<oo4046?(a5=cbb0000003549(a2),a2=cbb0000003550(-a5),a1=cbb0000003551(a4),a2=cbb0000003552(a5),a1=a1-a2,a6=cbb0000003553(a1)):(a1=cbb0000002254(a3),a2=cbb0000002255(a4),a3=cbb0000002256(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002257(a5),a2=cbb0000002258(a6),a1=a2!==a1,a1=cbb0000002259(a1)):s_cbb<oo4047?(a1=cbb000000604(a5),a2=cbb000000605(a3),a3=cbb000000606(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a2=cbb000000607(allthis)):(a1=cbb0000006963(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a1=cbb0000006964(a2),a2=cbb0000006965(a1),a1=a1+a2,a1=cbb0000006966(a1)):s_cbb<oo4049?s_cbb<oo4050?(a1=cbb0000007349(a2),a2=cbb0000007350(a1),a1=a2-a1,a1=cbb0000007351(a1),all=cbbb,a1=cbb0000007352(cbbb)):(a1=cbb0000003266(a6),a2=cbb0000003267(a1),a1=a1>>>a2,a2=cbb0000003268(a1),a1=cbb0000003269(a2),a2=cbb0000003270(a1),a1=a2===a1,a3=cbb0000003271(a1)):(a1=cbb0000001082(a5),a2=cbb0000001083(a6),a1=a2!==a1,a1=cbb0000001084(a1),j=cbb0000001085(a1),j2=cbb0000001086(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1);}else s_cbb<oo4051?s_cbb<oo4052?s_cbb<oo4053?s_cbb<oo4054?(a5=cbb0000002243(a1),a2=cbb0000002244(void a5),a1=cbb0000002245(a3),a2=cbb0000002246(a4),a3=cbb0000002247(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(function(){a1=cbb0000002479(a2);throw 
a1;}(),a1=cbb0000002480(a4),a2=cbb0000002481(a1),a1=a1>=a2,a2=cbb0000002482(a1)):s_cbb<oo4055?(a1=cbb0000006528(a3),a2=cbb0000006529(a4),a1=a1^a2,a5=cbb0000006530(a1),a1=cbb0000006531(a6),a2=cbb0000006532(a1),a1=a1>>>a2,a2=cbb0000006533(a1)):(a1=cbb0000001407(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a1=cbb0000001408(a2),a2=cbb0000001409(a1),a1=a2==a1,a4=cbb0000001410(a1)):s_cbb<oo4057?s_cbb<oo4058?(a1=cbb0000001068(a2),a2=cbb0000001069(a1),a1=a1/a2,a3=cbb0000001070(a1),j=cbb0000001071(a1),j2=cbb0000001072(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):(a2=cbb0000004073(allthis),a1=cbb0000004074(a2),a2=cbb0000004075(a1),a1=a1+a2,a1=cbb0000004076(a1)):(function(){debugger;}(),a1=cbb0000004392(a2),a2=cbb0000004393(a3),a3=cbb0000004394(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004395(a3)):s_cbb<oo4059?s_cbb<oo4060?s_cbb<oo4061?(a1=cbb0000002278(a3),a2=cbb0000002279(a4),a3=cbb0000002280(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002281(a2),a2=cbb0000002282(a1),a1=a2===a1,a3=cbb0000002283(a1)):(a1=cbb0000006348(a2),a2=cbb0000006349(a1),a1=a1/a2,a3=cbb0000006350(a1),a1=cbb0000006351(a2),a2=cbb0000006352(a1),a1=a1/a2,a3=cbb0000006353(a1)):(a1=cbb0000006686(a3),a2=cbb0000006687(a4),a1=a1%a2,a5=cbb0000006688(a1),function(){debugger;}()):s_cbb<oo4062?s_cbb<oo4063?(a1=cbb0000005067(a3),a2=cbb0000005068(a3),a1=a1>a2,a2=cbb0000005069(a1),a1=cbb0000005070(a2),a2=cbb0000005071(a1),a1=a2-a1,a1=cbb0000005072(a1)):(a1=cbb0000002231(a3),a2=cbb0000002232(a4),a3=cbb0000002233(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002234(a3),a2=cbb0000002235(a1),a1=a1<a2,a1=cbb0000002236(a1)):(j=cbb0000001114(a1),j2=cbb0000001115(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a1=cbb0000001116(cbbb),a2=cbb0000001117(a1),a1=a1<=a2,a5=cbb0000001118(a1));}else 
if(s_cbb<oo4064){if(s_cbb<oo4065){if(s_cbb<oo4066){if(s_cbb<oo4067){s_cbb<oo4068?s_cbb<oo4069?s_cbb<oo4070?s_cbb<oo4071?s_cbb<oo4072?(a5=cbb0000002321(a3),a4=cbb0000002322(!a5),a1=cbb0000002323(a3),a2=cbb0000002324(a3),a1=a1>a2,a2=cbb0000002325(a1)):(a1=cbb0000007341(a6),a2=cbb0000007342(a1),a1=a1>>>a2,a2=cbb0000007343(a1),all=cbbb,a1=cbb0000007344(cbbb)):s_cbb<oo4073?(a1=cbb0000008073(a5),a2=cbb0000008074(a9),a1=a2<a1,a6=cbb0000008075(a1),a1=shuz[start++],a2=cbb0000008076(constantPool[a1])):(a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008167(a1),a1=cbb0000008168(a6),a2=cbb0000008169(a7),a1=a1|a2,a8=cbb0000008170(a1)):s_cbb<oo4074?s_cbb<oo4075?(a1=cbb0000002182(a3),a2=cbb0000002183(a4),a3=cbb0000002184(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002185(a4),a2=cbb0000002186(a5),a1=a1-a2,a6=cbb0000002187(a1)):(a1=cbb000000522(a5),a2=cbb000000523(a3),a3=cbb000000524(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,function(){debugger;}()):(a1=cbb0000005997(a6),a2=cbb0000005998(a7),a1=a1|a2,a8=cbb0000005999(a1),a1=cbb0000006000(a6),a2=cbb0000006001(a7),a1=a1|a2,a8=cbb0000006002(a1)):s_cbb<oo4076?s_cbb<oo4077?s_cbb<oo4078?(a1=cbb0000005894(a3),a2=cbb0000005895(a4),a1=a1>>a2,a5=cbb0000005896(a1),a1=cbb0000005897({})):(a1=cbb0000005761(a3),a2=cbb0000005762(a4),a1=a1>>a2,a5=cbb0000005763(a1),a1=cbb0000005764(a2),a2=cbb0000005765(a1),a1=a2===a1,a3=cbb0000005766(a1)):s_cbb<oo4079?(a1=cbb0000002260(a3),a2=cbb0000002261(a4),a1=a1>>a2,a5=cbb0000002262(a1),a1=cbb0000002263(a3),a2=cbb0000002264(a4),a3=cbb0000002265(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(a1=cbb0000002955(a5),a2=cbb0000002956(a6),a1=a2!==a1,a1=cbb0000002957(a1),a1=cbb0000002958(a2),a2=cbb0000002959(a1),a1=a1/a2,a3=cbb0000002960(a1)):s_cbb<oo4080?s_cbb<oo4081?(function(){a1=cbb0000002502(a2);throw 
a1;}(),a1=cbb0000002503(a5),a2=cbb0000002504(a6),a1=a2*a1,a7=cbb0000002505(a1)):(a1=cbb0000006729(a3),a2=cbb0000006730(a4),a1=a1%a2,a5=cbb0000006731(a1),a1=cbb0000006732(a3),a2=cbb0000006733(a4),a1=a1%a2,a5=cbb0000006734(a1)):(function(){a1=shuz[start++];a3=cbb0000001764(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001765(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001766(a4);}(),a1=cbb0000001767(a2),a2=cbb0000001768(a1),a1=a2===a1,a3=cbb0000001769(a1)):s_cbb<oo4085?s_cbb<oo4086?s_cbb<oo4087?s_cbb<oo4088?(a1=cbb0000006932(a5),a2=cbb0000006933(a6),a1=a2*a1,a7=cbb0000006934(a1),a1=cbb0000006935(a5),a2=cbb0000006936(a6),a1=a2!==a1,a1=cbb0000006937(a1)):(a1=cbb0000004659(a3),a2=cbb0000004660(a1),a2.push(a1),a1=cbb0000004661(a2),a1=cbb0000004662(a2),a2=cbb0000004663(a1),a1=a2===a1,a3=cbb0000004664(a1)):s_cbb<oo4089?(a1=cbb0000005772(a3),a2=cbb0000005773(a4),a1=a1>>a2,a5=cbb0000005774(a1),a5=cbb0000005775(a1),a2=cbb0000005776(void a5)):(a1=cbb0000005617(a6),a2=cbb0000005618(a1),a1=a1>>>a2,a2=cbb0000005619(a1),a2=cbb0000005620(allthis)):s_cbb<oo4090?s_cbb<oo4091?(a1=cbb0000002726(a2),a2=cbb0000002727(a3),a1=a1&a2,a3=cbb0000002728(a1),a5=cbb0000002729(a1),a2=cbb0000002730(void 
a5)):(a1=cbb0000001291(a5),a2=cbb0000001292(a9),a1=a2<a1,a6=cbb0000001293(a1),function(){debugger;}()):(a1=cbb0000001498(a5),a2=cbb0000001499(a6),a1=a2!==a1,a1=cbb0000001500(a1),a1=shuz[start++],start+=a1):s_cbb<oo4092?s_cbb<oo4093?s_cbb<oo4094?(a1=cbb0000004492({}),a5=cbb0000004493(a3),a1=cbb0000004494(~a5)):(function(){debugger;}(),a1=cbb0000001345(a5),a2=cbb0000001346(a6),a1=a2!==a1,a1=cbb0000001347(a1)):s_cbb<oo4095?(a1=cbb0000005824(a3),a2=cbb0000005825(a4),a1=a1>>a2,a5=cbb0000005826(a1),a1=cbb0000005827(a5),a2=cbb0000005828(a6),a1=a2!==a1,a1=cbb0000005829(a1)):(function(){i=[];a2=cbb0000002571(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002572(a4),a2=cbb0000002573(a1),a1=a1>=a2,a2=cbb0000002574(a1)):s_cbb<oo4096?s_cbb<oo4097?(a1=cbb0000006343(a2),a2=cbb0000006344(a1),a1=a1/a2,a3=cbb0000006345(a1),a5=cbb0000006346(a1),a2=cbb0000006347(void a5)):(a1=cbb0000006384(a2),a2=cbb0000006385(a1),a1=a1/a2,a3=cbb0000006386(a1),a1=cbb0000006387(a2),a2=cbb0000006388(a1),a1=a1/a2,a3=cbb0000006389(a1)):(a1=shuz[start++],a2=cbb0000008069(constantPool[a1]),a1=cbb0000008070(cbbb),a2=cbb0000008071(a1),a1=a1<=a2,a5=cbb0000008072(a1));}else if(s_cbb<oo4098){if(s_cbb<oo4099){if(s_cbb<oo4100){if(s_cbb<oo4101){s_cbb<oo4102?(a1=cbb0000001422(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a5=cbb0000001423(a1),a2=cbb0000001424(void a5)):(a1=cbb0000006611(a3),a2=cbb0000006612(a4),a1=a1%a2,a5=cbb0000006613(a1),a2=cbb0000006614([]));}else if(s_cbb<oo4104){a1=cbb000000799({});a1=cbb000000800(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000801(a1)):a9=oo4829;;}else{a1=cbb0000002116(a3);a2=cbb0000002117(a4);a3=cbb0000002118(a5);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a1=cbb0000002119(cbbb);a2=cbb0000002120(a1);a1=a1<=a2;a5=cbb0000002121(a1);}}else 
if(s_cbb<oo4106){s_cbb<oo4107?(a1=cbb0000004610(a2),a2=cbb0000004611(a1),a1=a2-a1,a1=cbb0000004612(a1),a1=cbb0000004613(a3),a2=cbb0000004614(a1),a2.push(a1),a1=cbb0000004615(a2)):(a1=cbb000000945(a2),a2=cbb000000946(a3),a1=a1&a2,a3=cbb000000947(a1),a1=cbb000000948(a5),a2=cbb000000949(a9),a1=a2<a1,a6=cbb000000950(a1));}else{a1=cbb0000007081(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a2=cbb0000007082([]);}}else s_cbb<oo4109?s_cbb<oo4110?s_cbb<oo4111?(a1=cbb0000003108(a5),a2=cbb0000003109(a6),a1=a2!==a1,a1=cbb0000003110(a1),a2=cbb0000003111([])):(function(){a1=cbb0000001651(a2);a2=cbb0000001652(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001653(a1);}(),a1=cbb0000001654(a3),a2=cbb0000001655(a4),a1=a1>>a2,a5=cbb0000001656(a1)):s_cbb<oo4112?(a1=cbb0000007042(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a5=cbb0000007043(a1),a2=cbb0000007044(void a5)):(a1=cbb0000007222(a3),a2=cbb0000007223(a1),a1=a1<a2,a1=cbb0000007224(a1),a1=cbb0000007225(a4),a2=cbb0000007226(a5),a1=a1-a2,a6=cbb0000007227(a1)):s_cbb<oo4114?s_cbb<oo4115?(a1=cbb0000006290(a2),a2=cbb0000006291(a1),a1=a1/a2,a3=cbb0000006292(a1),a1=cbb0000006293(a6),a2=cbb0000006294(a7),a1=a1|a2,a8=cbb0000006295(a1)):(a1=cbb0000007780(a4),a2=cbb0000007781(a5),a1=a1-a2,a6=cbb0000007782(a1),a2=cbb0000007783([])):(a1=cbb0000006097(a3),a2=cbb0000006098(a2),a1=a1<<a2,a1=cbb0000006099(a1),a1=cbb0000006100(a3),a2=cbb0000006101(a1),a1=a1<a2,a1=cbb0000006102(a1));}else if(s_cbb<oo4116){if(s_cbb<oo4117){s_cbb<oo4118?s_cbb<oo4119?(a1=cbb0000005249(a5),a2=cbb0000005250(a9),a1=a2<a1,a6=cbb0000005251(a1),a1=cbb0000005252(a4),a2=cbb0000005253(a1),a1=a1>=a2,a2=cbb0000005254(a1)):(a1=cbb0000006534(a3),a2=cbb0000006535(a4),a1=a1^a2,a5=cbb0000006536(a1),a1=cbb0000006537(a2),a2=cbb0000006538(a1),a1=a1/a2,a3=cbb0000006539(a1)):s_cbb<oo4120?(a1=cbb00000038(a3),a2=cbb00000039(a1),a1=a1<a2,a1=cbb00000040(a1),a1=cbb00000041(a3),a2=cbb00000042(a1),a1=a1 in 
a2,a1=cbb00000043(a1)):(function(){debugger;}(),a1=cbb0000005655(a6),a2=cbb0000005656(a1),a1=a1>>>a2,a2=cbb0000005657(a1));}else if(s_cbb<oo4121){s_cbb<oo4122?(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008329(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008330(a2),a2=cbb0000008331(a1),a1=a1+a2,a1=cbb0000008332(a1)):(a5=cbb0000003579(a2),a2=cbb0000003580(-a5),a1=cbb0000003581(a6),a2=cbb0000003582(a7),a1=a1|a2,a8=cbb0000003583(a1));}else{(function(){debugger;})();a1=cbb0000003447(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003448(a1)):a9=oo4829;;}}else if(s_cbb<oo4125){if(s_cbb<oo4126){s_cbb<oo4127?(a1=cbb0000004895(a5),a2=cbb0000004896(a6),a1=a2*a1,a7=cbb0000004897(a1),a1=cbb0000004898(a2),a2=cbb0000004899(a1),a1=a2==a1,a4=cbb0000004900(a1)):(a1=cbb0000002176(a3),a2=cbb0000002177(a4),a3=cbb0000002178(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002179(a3),a2=cbb0000002180(a4),a1=a1>>a2,a5=cbb0000002181(a1));}else if(s_cbb<oo4128){j=cbb0000001124(a1);j2=cbb0000001125(a2);j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1;a1=cbb0000001126(a4);a2=cbb0000001127(a5);a1=a1-a2;a6=cbb0000001128(a1);}else{a1=cbb0000008361(a2);a2=cbb0000008362(a1);a1=a2-a1;a1=cbb0000008363(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008364(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return 
undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else s_cbb<oo4140?s_cbb<oo4141?(a1=cbb0000002907(a3),a2=cbb0000002908(a1),a3=delete a2[a1],a1=cbb0000002909(a3),a1=cbb0000002910(a5),a2=cbb0000002911(a6),a1=a2*a1,a7=cbb0000002912(a1)):(a2=cbb0000008065([]),a1=shuz[start++],a2=cbb0000008066(constantPool[a1])):(a1=cbb0000006593(a3),a2=cbb0000006594(a4),a1=a1^a2,a5=cbb0000006595(a1),a1=cbb0000006596(a3),a2=cbb0000006597(a4),a1=a1^a2,a5=cbb0000006598(a1));}else if(s_cbb<oo4142){if(s_cbb<oo4143){if(s_cbb<oo4144){if(s_cbb<oo4145){if(s_cbb<oo4146){if(s_cbb<oo4147){a1=cbb0000001006(a3);a2=cbb0000001007(a4);a1=a1%a2;a5=cbb0000001008(a1);return;}else{a1=cbb0000001242(cbbb);a2=cbb0000001243(a1);a1=a1<=a2;a5=cbb0000001244(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else s_cbb<oo4154?(a1=cbb0000004273(a2),a2=cbb0000004274(a1),a1=a2==a1,a4=cbb0000004275(a1),a1=cbb0000004276(a2),a2=cbb0000004277(a3),a3=cbb0000004278(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004279(a3)):(a1=cbb0000008030(a2),a2=cbb0000008031(a1),a1=a2==a1,a4=cbb0000008032(a1),a1=shuz[start++],a2=cbb0000008033(constantPool[a1]));}else 
s_cbb<oo4155?s_cbb<oo4156?(a1=cbb000000587(a5),a2=cbb000000588(a3),a3=cbb000000589(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb000000590(a2),a2=cbb000000591(a1),a1=a1/a2,a3=cbb000000592(a1)):(a1=cbb0000005438(a3),a2=cbb0000005439(a2),a1=a1<<a2,a1=cbb0000005440(a1),a1=cbb0000005441(a2),a2=cbb0000005442(a3),a1=a1&a2,a3=cbb0000005443(a1)):(a1=cbb0000005403(a2),a2=cbb0000005404(a1),a1=a1+a2,a1=cbb0000005405(a1),a1=cbb0000005406(cbbb),a2=cbb0000005407(a1),a1=a1<=a2,a5=cbb0000005408(a1));}else s_cbb<oo4157?s_cbb<oo4158?s_cbb<oo4159?(a1=cbb0000003278(a2),a2=cbb0000003279(a1),a1=a2===a1,a3=cbb0000003280(a1),a1=cbb0000003281(a6),a2=cbb0000003282(a1),a1=a1>>>a2,a2=cbb0000003283(a1)):(a1=cbb0000001581(a2),a2=cbb0000001582(a1),a1=a2-a1,a1=cbb0000001583(a1),function(){a1=cbb0000001584(a2);a2=cbb0000001585(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001586(a1);}()):s_cbb<oo4160?(a1=cbb0000003889(a3),a2=cbb0000003890(a1),a1=a1<a2,a1=cbb0000003891(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003892()):a4.splice(oo4806,oo4806,cbb0000003893());}a1=cbb0000003894(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):(a5=cbb0000003622(a2),a2=cbb0000003623(-a5),a5=cbb0000003624(a3),a4=cbb0000003625(!a5)):s_cbb<oo4170?s_cbb<oo4171?(a5=cbb0000003732(a3),a1=cbb0000003733(typeof a5),a1=cbb0000003734({})):(a1=cbb0000004968(a3),a2=cbb0000004969(a3),a1=a1>a2,a2=cbb0000004970(a1),a1=cbb0000004971(a4),a2=cbb0000004972(a5),a1=a1-a2,a6=cbb0000004973(a1)):(a5=cbb0000003617(a2),a2=cbb0000003618(-a5),a1=cbb0000003619(),a2=cbb0000003620(),a1=a2!=a1,cbb0000003621(a1));}else 
if(s_cbb<oo4172){s_cbb<oo4173?s_cbb<oo4174?s_cbb<oo4175?(a5=cbb0000001442(a3),a4=cbb0000001443(!a5),a1=cbb0000001444(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820):(a1=cbb0000005391(cbbb),a2=cbb0000005392(a1),a1=a1<=a2,a5=cbb0000005393(a1),a1=cbb0000005394(a6),a2=cbb0000005395(a7),a1=a1|a2,a8=cbb0000005396(a1)):s_cbb<oo4177?(a1=cbb0000003324(cbbb),a2=cbb0000003325(a1),a1=a1<=a2,a5=cbb0000003326(a1),a1=cbb0000003327(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):(a1=cbb0000007275(a2),a2=cbb0000007276(a3),a1=a1&a2,a3=cbb0000007277(a1),a1=cbb0000007278(a3),a2=cbb0000007279(a1),a1=a1<a2,a1=cbb0000007280(a1)):s_cbb<oo4179?s_cbb<oo4180?(a1=cbb0000001488(a3),a2=cbb0000001489(a4),a1=a1^a2,a5=cbb0000001490(a1),a1=shuz[start++],start+=a1):(a1=cbb0000002032(a2),a2=cbb0000002033(a1),a1=a2===a1,a3=cbb0000002034(a1),a1=cbb0000002035({})):(a1=cbb0000007949(a3),a2=cbb0000007950(a2),a1=a1<<a2,a1=cbb0000007951(a1),a1=shuz[start++],a2=cbb0000007952(a1));}else if(s_cbb<oo4181){if(s_cbb<oo4182){if(s_cbb<oo4183){a1=cbb0000007114(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a1=cbb0000007115(a2);a2=cbb0000007116(a1);a1=a2==a1;a4=cbb0000007117(a1);}else{a1=cbb0000004794(a4);a2=cbb0000004795(a5);a1=a1-a2;a6=cbb0000004796(a1);a1=cbb0000004797(a2);a2=cbb0000004798(a1);a1=a2==a1;a4=cbb0000004799(a1);}}else if(s_cbb<oo4185){a1=cbb000000727(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000728(a1)):a9=oo4829;;a1=cbb000000729(a3);a2=cbb000000730(a4);a1=a1^a2;a5=cbb000000731(a1);}else{a1=cbb0000007396(a3);a2=cbb0000007397(a4);a1=a1^a2;a5=cbb0000007398(a1);all=cbbb;a1=cbb0000007399(cbbb);}}else s_cbb<oo4187?s_cbb<oo4188?(a2=cbb0000003994(allthis),a1=cbb0000003995(a2),a2=cbb0000003996(a1),a1=a1+a2,a1=cbb0000003997(a1)):(a1=cbb0000002763(a3),a2=cbb0000002764(a4),a1=a1%a2,a5=cbb0000002765(a1),a5=cbb0000002766(a1),a2=cbb0000002767(void 
a5)):(a1=cbb0000007548(cbbb),a2=cbb0000007549(a1),a1=a1<=a2,a5=cbb0000007550(a1),a1=shuz[start++],a2=cbb0000007551(a3),a3=cbb0000007552(a1),a2[constantPool[a1]]=a3);}else if(s_cbb<oo4189){if(s_cbb<oo4190){if(s_cbb<oo4191){s_cbb<oo4192?s_cbb<oo4193?(a1=cbb0000004327(a2),a2=cbb0000004328(a3),a3=cbb0000004329(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004330(a3),a1=cbb0000004331({})):(a1=cbb000000630({}),a1=cbb000000631(a5),a2=cbb000000632(a3),a3=cbb000000633(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):s_cbb<oo4194?(function(){i=[];a2=cbb0000002619(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002620(a2),a2=cbb0000002621(a1),a1=a1+a2,a1=cbb0000002622(a1)):(a1=cbb0000005012(a3),a2=cbb0000005013(a3),a1=a1>a2,a2=cbb0000005014(a1),a1=cbb0000005015(a4),a2=cbb0000005016(a5),a1=a1-a2,a6=cbb0000005017(a1));}else if(s_cbb<oo4195){if(s_cbb<oo4196){a1=cbb0000001209(a3);a2=cbb0000001210(a1);a1=a1<a2;a1=cbb0000001211(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}else{a1=cbb0000005350(cbbb);a2=cbb0000005351(a1);a1=a1<=a2;a5=cbb0000005352(a1);a1=cbb0000005353(a2);a2=cbb0000005354(a1);a1=a1/a2;a3=cbb0000005355(a1);}}else{a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008443(a1);let 
g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008444(cbbb);a2=cbb0000008445(a1);a1=a1<=a2;a5=cbb0000008446(a1);}}else s_cbb<oo4214?s_cbb<oo4215?s_cbb<oo4216?(a1=cbb0000001172(a2),a2=cbb0000001173(a1),a1=a2==a1,a4=cbb0000001174(a1),j=cbb0000001175(a1),j2=cbb0000001176(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):(a2=cbb0000005278([]),a1=cbb0000005279(cbbb),a2=cbb0000005280(a1),a1=a1<=a2,a5=cbb0000005281(a1)):s_cbb<oo4217?(a1=cbb0000003128(a2),a2=cbb0000003129(a1),a1=a2===a1,a3=cbb0000003130(a1),a1=cbb0000003131(),a2=cbb0000003132(),a1=a2!=a1,cbb0000003133(a1)):(a1=cbb0000002158(a2),a2=cbb0000002159(a1),a1=a2-a1,a1=cbb0000002160(a1),a1=cbb0000002161(a3),a2=cbb0000002162(a4),a3=cbb0000002163(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):s_cbb<oo4218?s_cbb<oo4219?(a1=cbb0000001907(a4),a2=cbb0000001908(a1),a1=a1>=a2,a2=cbb0000001909(a1),a2=cbb0000001910([])):(a1=cbb0000006430(a2),a2=cbb0000006431(a1),a1=a1/a2,a3=cbb0000006432(a1),a5=cbb0000006433(a1),a2=cbb0000006434(void a5)):(a1=cbb0000006711(a3),a2=cbb0000006712(a4),a1=a1%a2,a5=cbb0000006713(a1),a1=cbb0000006714(a5),a2=cbb0000006715(a9),a1=a2<a1,a6=cbb0000006716(a1));}else 
if(s_cbb<oo4220){s_cbb<oo4221?s_cbb<oo4222?s_cbb<oo4223?(a1=cbb0000006626(a3),a2=cbb0000006627(a4),a1=a1^a2,a5=cbb0000006628(a1),a1=cbb0000006629(a3),a2=cbb0000006630(a4),a1=a1%a2,a5=cbb0000006631(a1)):(a2=cbb0000007455([]),a1=shuz[start++],a2=cbb0000007456(a3),a3=cbb0000007457(a1),a2[constantPool[a1]]=a3):s_cbb<oo4224?(a1=cbb0000003641(a5),a2=cbb0000003642(a9),a1=a2<a1,a6=cbb0000003643(a1),a5=cbb0000003644(a2),a2=cbb0000003645(-a5)):(a1=cbb0000004914(a2),a2=cbb0000004915(a1),a1=a2==a1,a4=cbb0000004916(a1),a1=cbb0000004917(a2),a2=cbb0000004918(a1),a1=a2==a1,a4=cbb0000004919(a1)):s_cbb<oo4225?s_cbb<oo4226?(a1=cbb000000407(a2),a2=cbb000000408(a1),a1=a2-a1,a1=cbb000000409(a1),a1=cbb000000410({})):(a1=cbb0000005397(cbbb),a2=cbb0000005398(a1),a1=a1<=a2,a5=cbb0000005399(a1),a1=cbb0000005400(a2),a2=cbb0000005401(a1),a1=a2-a1,a1=cbb0000005402(a1)):(a1=cbb0000006955(a4),a2=cbb0000006956(a1),a1=a1>=a2,a2=cbb0000006957(a1),a1=cbb0000006958(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828);}else if(s_cbb<oo4228){if(s_cbb<oo4229){if(s_cbb<oo4230){a1=shuz[start++];a2=cbb0000007480(a3);a3=cbb0000007481(a1);a2[constantPool[a1]]=a3;a1=cbb0000007482(a3);a2=cbb0000007483(a4);a1=a1^a2;a5=cbb0000007484(a1);}else{a1=cbb0000007133(a3);a2=cbb0000007134(a2);a1=a1<<a2;a1=cbb0000007135(a1);a1=cbb0000007136(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}}else{a5=cbb0000008245(a1);a2=cbb0000008246(void a5);(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008247(a2);cbbb[a9]=argsList[a2];}})();}}else s_cbb<oo4233?s_cbb<oo4234?(a2=cbb0000001952([]),a1=cbb0000001953(a6),a2=cbb0000001954(a7),a1=a1|a2,a8=cbb0000001955(a1)):(a2=cbb0000006075(allthis),a1=cbb0000006076(a6),a2=cbb0000006077(a7),a1=a1|a2,a8=cbb0000006078(a1)):(a5=cbb0000003539(a2),a2=cbb0000003540(-a5),a1=cbb0000003541(),a2=cbb0000003542(),a1=a2!=a1,cbb0000003543(a1));}else 
if(s_cbb<oo4235){if(s_cbb<oo4236){if(s_cbb<oo4237){if(s_cbb<oo4238){s_cbb<oo4239?s_cbb<oo4240?s_cbb<oo4241?(a2=cbb0000001310([]),function(){debugger;}()):(a1=cbb0000004371(a2),a2=cbb0000004372(a1),a1=a2===a1,a3=cbb0000004373(a1),a1=cbb0000004374(a2),a2=cbb0000004375(a3),a3=cbb0000004376(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004377(a3)):s_cbb<oo4242?(a1=cbb0000008034(a2),a2=cbb0000008035(a1),a1=a1/a2,a3=cbb0000008036(a1),a1=shuz[start++],a2=cbb0000008037(constantPool[a1])):(a1=cbb0000003308(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a1=cbb0000003309(a4),a2=cbb0000003310(a5),a1=a1-a2,a6=cbb0000003311(a1)):s_cbb<oo4244?s_cbb<oo4245?(a1=cbb00000066(a3),a2=cbb00000067(a1),a1=a1 in a2,a1=cbb00000068(a1),a1=cbb00000069(a4),a2=cbb00000070(a1),a1=a1>=a2,a2=cbb00000071(a1)):(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008306(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008307(a3),a2=cbb0000008308(a2),a1=a1<<a2,a1=cbb0000008309(a1)):(a1=cbb0000003316(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828,a1=cbb0000003317(a4),a2=cbb0000003318(a5),a1=a1-a2,a6=cbb0000003319(a1));}else if(s_cbb<oo4248){if(s_cbb<oo4249){s_cbb<oo4250?(a1=cbb0000004771(a2),a2=cbb0000004772(a1),a1=a2==a1,a4=cbb0000004773(a1),a5=cbb0000004774(a3),a4=cbb0000004775(!a5)):(a1=cbb000000916(a5),a2=cbb000000917(a9),a1=a2<a1,a6=cbb000000918(a1),a1=cbb000000919(a6),a2=cbb000000920(a1),a1=a1>>>a2,a2=cbb000000921(a1));}else if(s_cbb<oo4251){a1=cbb0000003403(a6);a2=cbb0000003404(a7);a1=a1|a2;a8=cbb0000003405(a1);a1=cbb0000003406(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003407(a1)):a9=oo4829;;}else{a1=cbb0000005467(a2);a2=cbb0000005468(a3);a1=a1&a2;a3=cbb0000005469(a1);a1=cbb0000005470(a2);a2=cbb0000005471(a1);a1=a2-a1;a1=cbb0000005472(a1);}}else s_cbb<oo4253?s_cbb<oo4254?(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004137(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004138(a6));}offnew=oo4828;a3==RegExp?a4=new 
RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004139(a4);}(),a1=cbb0000004140(a3),a2=cbb0000004141(a4),a1=a1^a2,a5=cbb0000004142(a1)):(a1=cbb0000005795(cbbb),a2=cbb0000005796(a1),a1=a1<=a2,a5=cbb0000005797(a1),a1=cbb0000005798(a3),a2=cbb0000005799(a4),a1=a1>>a2,a5=cbb0000005800(a1)):(a1=cbb0000007669(a2),a2=cbb0000007670(a1),a1=a1+a2,a1=cbb0000007671(a1),a1=cbb0000007672(a6),a2=cbb0000007673(a1),a1=a1>>>a2,a2=cbb0000007674(a1));}else s_cbb<oo4262?s_cbb<oo4263?s_cbb<oo4264?s_cbb<oo4265?(a1=cbb0000008178(a5),a2=cbb0000008179(a6),a1=a2!==a1,a1=cbb0000008180(a1),a1=shuz[start++],a2=shuz[start++],a1=new RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008181(a1)):(a1=cbb0000002778(a3),a2=cbb0000002779(a1),a3=delete a2[a1],a1=cbb0000002780(a3),a1=cbb0000002781(a3),a2=cbb0000002782(a1),a1=a1<a2,a1=cbb0000002783(a1)):s_cbb<oo4266?(j=cbb0000001187(a1),j2=cbb0000001188(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a1=cbb0000001189(a4),a2=cbb0000001190(a5),a1=a1-a2,a6=cbb0000001191(a1)):(j=cbb0000001200(a1),j2=cbb0000001201(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a2=cbb0000001202([])):s_cbb<oo4267?s_cbb<oo4268?(a1=cbb000000215(),a2=cbb000000216(),a1=a2!=a1,cbb000000217(a1),a1=cbb000000218(a2),a2=cbb000000219(a1),a1=a2==a1,a4=cbb000000220(a1)):(a1=cbb0000005840(a3),a2=cbb0000005841(a4),a1=a1>>a2,a5=cbb0000005842(a1),a1=cbb0000005843(a3),a2=cbb0000005844(a1),a1=a1<a2,a1=cbb0000005845(a1)):(a1=cbb0000005115(a3),a2=cbb0000005116(a3),a1=a1>a2,a2=cbb0000005117(a1),a1=cbb0000005118(a4),a2=cbb0000005119(a1),a1=a1>=a2,a2=cbb0000005120(a1)):s_cbb<oo4269?s_cbb<oo4270?s_cbb<oo4271?(a1=cbb00000022(a3),a2=cbb00000023(a1),a1=a1 in a2,a1=cbb00000024(a1),a1=cbb00000025(a6),a2=cbb00000026(a1),a1=a1>>>a2,a2=cbb00000027(a1)):(a1=cbb0000002961(a5),a2=cbb0000002962(a6),a1=a2!==a1,a1=cbb0000002963(a1),a1=cbb0000002964(a3),a2=cbb0000002965(a2),a1=a1<<a2,a1=cbb0000002966(a1)):s_cbb<oo4272?(a5=cbb0000007510(a1),a2=cbb0000007511(void 
a5),a1=shuz[start++],a2=cbb0000007512(a3),a3=cbb0000007513(a1),a2[constantPool[a1]]=a3):(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003836()):a4.splice(oo4806,oo4806,cbb0000003837());}a1=cbb0000003838(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a1=cbb0000003839(a6),a2=cbb0000003840(a7),a1=a1|a2,a8=cbb0000003841(a1)):s_cbb<oo4282?s_cbb<oo4283?(a1=cbb0000007173(a6),a2=cbb0000007174(a1),a1=a1>>>a2,a2=cbb0000007175(a1),a1=cbb0000007176(a3),a2=cbb0000007177(a1),a1=a1<a2,a1=cbb0000007178(a1)):(a5=cbb0000002390(a3),a4=cbb0000002391(!a5),a1=cbb0000002392(a2),a2=cbb0000002393(a1),a1=a2-a1,a1=cbb0000002394(a1)):(a1=cbb00000016(a3),a2=cbb00000017(a1),a1=a1 in a2,a1=cbb00000018(a1),a1=cbb00000019(a3),a2=cbb00000020(a4),a1=a1>>a2,a5=cbb00000021(a1));}else if(s_cbb<oo4284){if(s_cbb<oo4285){if(s_cbb<oo4286){s_cbb<oo4287?s_cbb<oo4288?(function(){debugger;}(),a5=cbb0000002746(a1),a2=cbb0000002747(void a5)):(a1=cbb0000007308(a3),a2=cbb0000007309(a1),a1=a1<a2,a1=cbb0000007310(a1),a1=cbb0000007311(a2),a2=cbb0000007312(a1),a1=a2===a1,a3=cbb0000007313(a1)):s_cbb<oo4289?(a1=cbb0000001482(cbbb),a2=cbb0000001483(a1),a1=a1<=a2,a5=cbb0000001484(a1),a1=shuz[start++],start+=a1):(a1=cbb0000002070({}),a1=cbb0000002071(a3),a2=cbb0000002072(a4),a1=a1>>a2,a5=cbb0000002073(a1));}else if(s_cbb<oo4290){if(s_cbb<oo4291){a1=cbb000000754(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000755(a1)):a9=oo4829;;a1=cbb000000756(a3);a2=cbb000000757(a3);a1=a1>a2;a2=cbb000000758(a1);}else{a1=cbb0000006717(a5);a2=cbb0000006718(a9);a1=a2<a1;a6=cbb0000006719(a1);a1=cbb0000006720(a3);a2=cbb0000006721(a4);a1=a1%a2;a5=cbb0000006722(a1);}}else{a2=cbb0000002715(allthis);a5=cbb0000002716(a1);a2=cbb0000002717(void a5);}}else 
if(s_cbb<oo4293){if(s_cbb<oo4294){if(s_cbb<oo4295){a1=cbb0000008389(a6);a2=cbb0000008390(a1);a1=a1>>>a2;a2=cbb0000008391(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008392(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}else{a1=cbb0000004401(a6);a2=cbb0000004402(a1);a1=a1>>>a2;a2=cbb0000004403(a1);a1=cbb0000004404(a2);a2=cbb0000004405(a3);a3=cbb0000004406(a4);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a1=cbb0000004407(a3);}}else s_cbb<oo4307?(a1=cbb0000007167(a5),a2=cbb0000007168(a9),a1=a2<a1,a6=cbb0000007169(a1),a1=cbb0000007170(a3),a2=cbb0000007171(a1),a1=a1<a2,a1=cbb0000007172(a1)):(a1=cbb0000003554(a3),a2=cbb0000003555(a2),a1=a1<<a2,a1=cbb0000003556(a1),a5=cbb0000003557(a2),a2=cbb0000003558(-a5));}else s_cbb<oo4308?s_cbb<oo4309?(a1=cbb0000007502(a2),a2=cbb0000007503(a1),a1=a1+a2,a1=cbb0000007504(a1),a1=shuz[start++],a2=cbb0000007505(a3),a3=cbb0000007506(a1),a2[constantPool[a1]]=a3):(a1=cbb0000003070(a5),a2=cbb0000003071(a6),a1=a2*a1,a7=cbb0000003072(a1),a1=cbb0000003073(a5),a2=cbb0000003074(a6),a1=a2!==a1,a1=cbb0000003075(a1)):(a1=cbb0000005915(a3),a2=cbb0000005916(a4),a1=a1>>a2,a5=cbb0000005917(a1),function(){debugger;}());}else 
if(s_cbb<oo4310){if(s_cbb<oo4311){if(s_cbb<oo4312){if(s_cbb<oo4313){a2=cbb0000004396([]);a1=cbb0000004397(a2);a2=cbb0000004398(a3);a3=cbb0000004399(a4);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a1=cbb0000004400(a3);}else{a1=cbb0000001203(a2);a2=cbb0000001204(a3);a1=a1&a2;a3=cbb0000001205(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else s_cbb<oo4320?(a1=cbb000000531(a5),a2=cbb000000532(a3),a3=cbb000000533(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb000000534(a5),a2=cbb000000535(a6),a1=a2!==a1,a1=cbb000000536(a1)):(a1=cbb0000004185(a5),a2=cbb0000004186(a9),a1=a2<a1,a6=cbb0000004187(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004188(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004189(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004190(a4);}());}else 
s_cbb<oo4328?s_cbb<oo4329?(j=cbb0000001087(a1),j2=cbb0000001088(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1,a1=cbb0000001089(),a2=cbb0000001090(),a1=a2!=a1,cbb0000001091(a1)):(a2=cbb0000004038(allthis),a1=cbb0000004039(a5),a2=cbb0000004040(a6),a1=a2*a1,a7=cbb0000004041(a1)):(a1=cbb0000005725(a6),a2=cbb0000005726(a1),a1=a1>>>a2,a2=cbb0000005727(a1),a1=cbb0000005728(a2),a2=cbb0000005729(a1),a1=a2==a1,a4=cbb0000005730(a1));}else if(s_cbb<oo4330){if(s_cbb<oo4331){if(s_cbb<oo4332){a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}a1=cbb0000001276(a2);a2=cbb0000001277(a1);a1=a2-a1;a1=cbb0000001278(a1);}else{a1=cbb0000007752(a4);a2=cbb0000007753(a5);a1=a1-a2;a6=cbb0000007754(a1);a1=cbb0000007755(a3);a2=cbb0000007756(a1);a1=a1<a2;a1=cbb0000007757(a1);}}else{a1=cbb0000003355(a3);a3=shuz[start++];a1[constantPool[a3]]-=oo4828;a1=cbb0000003356(a5);a2=cbb0000003357(a6);a1=a2!==a1;a1=cbb0000003358(a1);}}else s_cbb<oo4340?s_cbb<oo4341?(a1=cbb0000008193({}),a1=shuz[start++],a2=shuz[start++],a1=new 
RegExp(constantPool[a1],constantPool[a2]),a4=cbb0000008194(a1)):(a1=cbb0000002192({}),a1=cbb0000002193(a3),a2=cbb0000002194(a4),a3=cbb0000002195(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(a1=cbb0000002931(a3),a2=cbb0000002932(a1),a3=delete a2[a1],a1=cbb0000002933(a3),a1=cbb0000002934(a2),a2=cbb0000002935(a1),a1=a1+a2,a1=cbb0000002936(a1));}else if(s_cbb<oo4342){if(s_cbb<oo4343){if(s_cbb<oo4344){if(s_cbb<oo4345){if(s_cbb<oo4346){s_cbb<oo4347?(a1=cbb000000221(),a2=cbb000000222(),a1=a2!=a1,cbb000000223(a1),a1=cbb000000224(),a2=cbb000000225(),a1=a2!=a1,cbb000000226(a1)):(a1=cbb0000001092(a6),a2=cbb0000001093(a7),a1=a1|a2,a8=cbb0000001094(a1),j=cbb0000001095(a1),j2=cbb0000001096(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1);}else if(s_cbb<oo4348){a1=cbb0000002943(a2);a2=cbb0000002944(a1);a1=a2-a1;a1=cbb0000002945(a1);a1=cbb0000002946(a3);a2=cbb0000002947(a1);a3=delete a2[a1];a1=cbb0000002948(a3);}else{a1=cbb0000008423(a2);a2=cbb0000008424(a3);a1=a1&a2;a3=cbb0000008425(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008426(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else 
s_cbb<oo4360?s_cbb<oo4361?(a1=cbb0000004339(a5),a2=cbb0000004340(a6),a1=a2*a1,a7=cbb0000004341(a1),a1=cbb0000004342(a2),a2=cbb0000004343(a3),a3=cbb0000004344(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004345(a3)):(function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003949()):a4.splice(oo4806,oo4806,cbb0000003950());}a1=cbb0000003951(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}(),a1=cbb0000003952(a3),a2=cbb0000003953(a4),a1=a1^a2,a5=cbb0000003954(a1)):(function(){debugger;}(),a5=cbb0000004567(a3),a1=cbb0000004568(~a5));}else if(s_cbb<oo4371){s_cbb<oo4372?s_cbb<oo4373?(a1=cbb0000002471(a3),a2=cbb0000002472(a1),a1=a1<a2,a1=cbb0000002473(a1),function(){a1=cbb0000002474(a2);throw a1;}()):(a1=cbb00000084(a3),a2=cbb00000085(a2),a1=a1<<a2,a1=cbb00000086(a1),a1=cbb00000087(a3),a2=cbb00000088(a1),a1=a1 in a2,a1=cbb00000089(a1)):s_cbb<oo4374?(a1=cbb0000003825(a2),a2=cbb0000003826(a3),a1=a1&a2,a3=cbb0000003827(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003828()):a4.splice(oo4806,oo4806,cbb0000003829());}a1=cbb0000003830(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):(function(){debugger;}(),function(){i=[];a2=cbb0000002532(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}());}else if(s_cbb<oo4384){s_cbb<oo4385?(a1=cbb0000006325(a2),a2=cbb0000006326(a1),a1=a2===a1,a3=cbb0000006327(a1),a1=cbb0000006328(a2),a2=cbb0000006329(a1),a1=a1/a2,a3=cbb0000006330(a1)):(a5=cbb0000002493(a3),a4=cbb0000002494(!a5),function(){a1=cbb0000002495(a2);throw a1;}());}else{a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008381(a1);let g=a7;all[g]=function(){let g2=new 
cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}a1=cbb0000008382(a2);a2=cbb0000008383(a1);a1=a2===a1;a3=cbb0000008384(a1);}}else s_cbb<oo4397?s_cbb<oo4398?s_cbb<oo4399?s_cbb<oo4400?(function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008314(a2);cbbb[a9]=argsList[a2];}}(),a1=cbb0000008315(a3),a2=cbb0000008316(a3),a1=a1>a2,a2=cbb0000008317(a1)):(a1=cbb00000048(a3),a2=cbb00000049(a1),a1=a1 in 
a2,a1=cbb00000050(a1),a1=cbb00000051(a3),a2=cbb00000052(a3),a1=a1>a2,a2=cbb00000053(a1)):s_cbb<oo4402?(a1=cbb0000001106(a3),a2=cbb0000001107(a2),a1=a1<<a2,a1=cbb0000001108(a1),j=cbb0000001109(a1),j2=cbb0000001110(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):(a1=cbb000000383(a6),a2=cbb000000384(a7),a1=a1|a2,a8=cbb000000385(a1),a1=cbb000000386(a2),a2=cbb000000387(a1),a1=a2-a1,a1=cbb000000388(a1)):s_cbb<oo4403?s_cbb<oo4404?(function(){a1=shuz[start++];a3=cbb0000001873(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001874(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001875(a4);}(),a1=cbb0000001876(a3),a2=cbb0000001877(a1),a1=a1<a2,a1=cbb0000001878(a1)):(a1=cbb0000001966(a4),a2=cbb0000001967(a1),a1=a1>=a2,a2=cbb0000001968(a1),a2=cbb0000001969([])):(a2=cbb0000004069(allthis),a1=cbb0000004070(),a2=cbb0000004071(),a1=a2!=a1,cbb0000004072(a1)):s_cbb<oo4408?s_cbb<oo4409?s_cbb<oo4410?(a5=cbb0000003559(a2),a2=cbb0000003560(-a5),a1=cbb0000003561(a3),a2=cbb0000003562(a1),a1=a1<a2,a1=cbb0000003563(a1)):(a1=cbb0000005682(a6),a2=cbb0000005683(a1),a1=a1>>>a2,a2=cbb0000005684(a1),a2=cbb0000005685(allthis)):s_cbb<oo4411?(a1=cbb0000002405(a3),a2=cbb0000002406(a4),a1=a1%a2,a5=cbb0000002407(a1),a5=cbb0000002408(a3),a4=cbb0000002409(!a5)):(a1=cbb0000003794(a6),a2=cbb0000003795(a7),a1=a1|a2,a8=cbb0000003796(a1),a5=cbb0000003797(a3),a1=cbb0000003798(typeof 
a5)):s_cbb<oo4412?s_cbb<oo4413?(a1=cbb0000002036(a4),a2=cbb0000002037(a5),a1=a1-a2,a6=cbb0000002038(a1),a1=cbb0000002039({})):(a1=cbb000000501(a2),a2=cbb000000502(a1),a1=a2-a1,a1=cbb000000503(a1),a1=cbb000000504(a3),a2=cbb000000505(a4),a1=a1>>a2,a5=cbb000000506(a1)):(a1=cbb000000951(a5),a2=cbb000000952(a9),a1=a2<a1,a6=cbb000000953(a1),a1=cbb000000954(),a2=cbb000000955(),a1=a2!=a1,cbb000000956(a1));}else if(s_cbb<oo4414){if(s_cbb<oo4415){if(s_cbb<oo4416){s_cbb<oo4417?s_cbb<oo4418?(a1=cbb0000002794(a3),a2=cbb0000002795(a2),a1=a1<<a2,a1=cbb0000002796(a1),a1=cbb0000002797(a3),a2=cbb0000002798(a1),a3=delete a2[a1],a1=cbb0000002799(a3)):(a1=cbb0000007900(a2),a2=cbb0000007901(a1),a1=a2===a1,a3=cbb0000007902(a1),a1=shuz[start++],a2=cbb0000007903(a1)):s_cbb<oo4419?(a1=shuz[start++],a2=cbb0000007953(a1),a1=cbb0000007954(),a2=cbb0000007955(),a1=a2!=a1,cbb0000007956(a1)):(a1=cbb0000002108({}),a2=cbb0000002109(allthis));}else if(s_cbb<oo4420){s_cbb<oo4421?(a1=cbb0000001687(),a2=cbb0000001688(),a1=a2!=a1,cbb0000001689(a1),function(){a1=cbb0000001690(a2);a2=cbb0000001691(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001692(a1);}()):(a1=shuz[start++],start+=a1,a5=cbb0000001528(a3),a4=cbb0000001529(!a5));}else{a1=cbb000000997(a5);a2=cbb000000998(a6);a1=a2*a1;a7=cbb000000999(a1);return;}}else if(s_cbb<oo4422){s_cbb<oo4423?s_cbb<oo4424?(a5=cbb0000002637(a1),a2=cbb0000002638(void 
a5),a1=cbb0000002639(a5),a2=cbb0000002640(a6),a1=a2!==a1,a1=cbb0000002641(a1)):(a2=cbb0000001782([]),function(){a1=shuz[start++];a3=cbb0000001783(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001784(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001785(a4);}()):s_cbb<oo4428?(function(){a1=cbb0000001569(a2);a2=cbb0000001570(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001571(a1);}(),a1=cbb0000001572(a4),a2=cbb0000001573(a5),a1=a1-a2,a6=cbb0000001574(a1)):(a1=cbb0000002937(a3),a2=cbb0000002938(a1),a3=delete a2[a1],a1=cbb0000002939(a3),a1=cbb0000002940(a5),a2=cbb0000002941(a9),a1=a2<a1,a6=cbb0000002942(a1));}else if(s_cbb<oo4429){s_cbb<oo4430?(a1=cbb0000007892(a4),a2=cbb0000007893(a5),a1=a1-a2,a6=cbb0000007894(a1),a2=cbb0000007895(allthis)):(a1=cbb0000003242(a3),a2=cbb0000003243(a2),a1=a1<<a2,a1=cbb0000003244(a1),a1=cbb0000003245(a2),a2=cbb0000003246(a1),a1=a2===a1,a3=cbb0000003247(a1));}else{a2=cbb0000008371([]);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008372(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let 
h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else s_cbb<oo4442?s_cbb<oo4443?s_cbb<oo4444?s_cbb<oo4445?(a1=cbb0000003392(a3),a2=cbb0000003393(a4),a1=a1^a2,a5=cbb0000003394(a1),a1=cbb0000003395(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):(a1=cbb0000004859(a2),a2=cbb0000004860(a1),a1=a2==a1,a4=cbb0000004861(a1),a1=cbb0000004862(a4),a2=cbb0000004863(a5),a1=a1-a2,a6=cbb0000004864(a1)):s_cbb<oo4447?(a1=cbb0000006009(a6),a2=cbb0000006010(a7),a1=a1|a2,a8=cbb0000006011(a1),a1=cbb0000006012(a2),a2=cbb0000006013(a3),a1=a1&a2,a3=cbb0000006014(a1)):(a2=cbb0000001436([]),a1=cbb0000001437(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820):s_cbb<oo4449?s_cbb<oo4450?(a1=cbb0000006983(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a1=cbb0000006984(a2),a2=cbb0000006985(a3),a1=a1&a2,a3=cbb0000006986(a1)):(a1=cbb000000957(a5),a2=cbb000000958(a9),a1=a2<a1,a6=cbb000000959(a1),a1=cbb000000960(a5),a2=cbb000000961(a6),a1=a2!==a1,a1=cbb000000962(a1)):(a1=cbb0000002237(a2),a2=cbb0000002238(a1),a1=a2==a1,a4=cbb0000002239(a1),a1=cbb0000002240(a3),a2=cbb0000002241(a4),a3=cbb0000002242(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):s_cbb<oo4452?s_cbb<oo4453?s_cbb<oo4454?(a1=cbb0000006269(a2),a2=cbb0000006270(a1),a1=a1/a2,a3=cbb0000006271(a1),function(){debugger;}()):(a1=cbb0000003877(a6),a2=cbb0000003878(a1),a1=a1>>>a2,a2=cbb0000003879(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003880()):a4.splice(oo4806,oo4806,cbb0000003881());}a1=cbb0000003882(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else 
if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):(a2=cbb0000004034(allthis),a1=cbb0000004035(a3),a2=cbb0000004036(a1),a1=a1<a2,a1=cbb0000004037(a1)):s_cbb<oo4464?s_cbb<oo4465?(a1=cbb0000006337(a5),a2=cbb0000006338(a6),a1=a2*a1,a7=cbb0000006339(a1),a1=cbb0000006340(a2),a2=cbb0000006341(a1),a1=a1/a2,a3=cbb0000006342(a1)):(all=cbbb,a1=cbb0000007361(cbbb),a1=cbb0000007362(a3),a2=cbb0000007363(a4),a1=a1%a2,a5=cbb0000007364(a1)):(a5=cbb0000002731(a1),a2=cbb0000002732(void a5),a1=cbb0000002733(a3),a2=cbb0000002734(a3),a1=a1>a2,a2=cbb0000002735(a1));}else if(s_cbb<oo4466){if(s_cbb<oo4467){if(s_cbb<oo4468){if(s_cbb<oo4469){if(s_cbb<oo4470){if(s_cbb<oo4471){if(s_cbb<oo4472){if(s_cbb<oo4473){a1=cbb0000007400(a6);a2=cbb0000007401(a7);a1=a1|a2;a8=cbb0000007402(a1);all=cbbb;a1=cbb0000007403(cbbb);}else{a1=cbb000000981(a4);a2=cbb000000982(a1);a1=a1>=a2;a2=cbb000000983(a1);return;}}else s_cbb<oo4474?(a1=cbb0000004980(cbbb),a2=cbb0000004981(a1),a1=a1<=a2,a5=cbb0000004982(a1),a1=cbb0000004983(a3),a2=cbb0000004984(a3),a1=a1>a2,a2=cbb0000004985(a1)):(a2=cbb0000002623(allthis),function(){i=[];a2=cbb0000002624(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}());}else s_cbb<oo4475?s_cbb<oo4476?(a1=cbb0000001669(a4),a2=cbb0000001670(a5),a1=a1-a2,a6=cbb0000001671(a1),function(){a1=cbb0000001672(a2);a2=cbb0000001673(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001674(a1);}()):(a1=cbb0000005046(a3),a2=cbb0000005047(a1),a1=a1<a2,a1=cbb0000005048(a1),a1=cbb0000005049(a3),a2=cbb0000005050(a3),a1=a1>a2,a2=cbb0000005051(a1)):(a1=shuz[start++],a2=cbb0000008022(constantPool[a1]),a1=cbb0000008023(a6),a2=cbb0000008024(a1),a1=a1>>>a2,a2=cbb0000008025(a1));}else 
s_cbb<oo4477?s_cbb<oo4478?s_cbb<oo4479?(a1=cbb0000002052({}),a1=cbb0000002053(a6),a2=cbb0000002054(a7),a1=a1|a2,a8=cbb0000002055(a1)):(a1=cbb0000005527(a2),a2=cbb0000005528(a3),a1=a1&a2,a3=cbb0000005529(a1),a1=cbb0000005530(a5),a2=cbb0000005531(a9),a1=a2<a1,a6=cbb0000005532(a1)):s_cbb<oo4480?(a2=cbb0000003988(allthis),a1=cbb0000003989(a5),a2=cbb0000003990(a9),a1=a2<a1,a6=cbb0000003991(a1)):(a1=cbb0000005385(a2),a2=cbb0000005386(a3),a1=a1&a2,a3=cbb0000005387(a1),a1=cbb0000005388(cbbb),a2=cbb0000005389(a1),a1=a1<=a2,a5=cbb0000005390(a1)):s_cbb<oo4481?s_cbb<oo4482?(a1=cbb0000001995(a6),a2=cbb0000001996(a1),a1=a1>>>a2,a2=cbb0000001997(a1),a2=cbb0000001998([])):(a1=cbb0000001177(a3),a2=cbb0000001178(a4),a1=a1>>a2,a5=cbb0000001179(a1),j=cbb0000001180(a1),j2=cbb0000001181(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):(a1=shuz[start++],a2=cbb0000007519(a3),a3=cbb0000007520(a1),a2[constantPool[a1]]=a3,a1=cbb0000007521(),a2=cbb0000007522(),a1=a2!=a1,cbb0000007523(a1));}else if(s_cbb<oo4483){s_cbb<oo4484?s_cbb<oo4485?s_cbb<oo4486?(a1=shuz[start++],start+=a1,a1=cbb0000001502(a2),a2=cbb0000001503(a1),a1=a1+a2,a1=cbb0000001504(a1)):(a1=shuz[start++],a2=cbb0000008097(constantPool[a1]),a1=cbb0000008098(a2),a2=cbb0000008099(a1),a1=a2-a1,a1=cbb0000008100(a1)):s_cbb<oo4487?(a1=cbb000000365(a2),a2=cbb000000366(a1),a1=a2-a1,a1=cbb000000367(a1),a1=cbb000000368(a4),a2=cbb000000369(a5),a1=a1-a2,a6=cbb000000370(a1)):(a5=cbb0000003626(a2),a2=cbb0000003627(-a5),a1=cbb0000003628(a3),a2=cbb0000003629(a3),a1=a1>a2,a2=cbb0000003630(a1)):s_cbb<oo4488?s_cbb<oo4489?(a1=cbb0000003662(a3),a2=cbb0000003663(a4),a1=a1%a2,a5=cbb0000003664(a1),a5=cbb0000003665(a2),a2=cbb0000003666(-a5)):(a1=cbb000000640(a6),a2=cbb000000641(a1),a1=a1>>>a2,a2=cbb000000642(a1),a1=cbb000000643(a5),a2=cbb000000644(a3),a3=cbb000000645(a1),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1):(function(){a1=cbb0000001645(a2);a2=cbb0000001646(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001647(a1);}(
),a1=cbb0000001648(a3),a2=cbb0000001649(a1),a1=a1<a2,a1=cbb0000001650(a1));}else if(s_cbb<oo4490){if(s_cbb<oo4491){if(s_cbb<oo4492){a1=cbb0000007094(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a1=cbb0000007095();a2=cbb0000007096();a1=a2!=a1;cbb0000007097(a1);}else{a1=cbb0000008419(a5);a2=cbb0000008420(a6);a1=a2!==a1;a1=cbb0000008421(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008422(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else s_cbb<oo4505?(a1=cbb0000004843(a2),a2=cbb0000004844(a1),a1=a2==a1,a4=cbb0000004845(a1),a2=cbb0000004846([])):(a1=cbb0000002986(a5),a2=cbb0000002987(a6),a1=a2!==a1,a1=cbb0000002988(a1),a1=cbb0000002989(a5),a2=cbb0000002990(a6),a1=a2!==a1,a1=cbb0000002991(a1));}else if(s_cbb<oo4506){s_cbb<oo4507?(a1=cbb0000002773(a2),a2=cbb0000002774(a1),a1=a2===a1,a3=cbb0000002775(a1),a5=cbb0000002776(a1),a2=cbb0000002777(void a5)):(a1=cbb0000001657(a5),a2=cbb0000001658(a6),a1=a2!==a1,a1=cbb0000001659(a1),function(){a1=cbb0000001660(a2);a2=cbb0000001661(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001662(a1);}());}else{a2=cbb0000003422(allthis);a1=cbb0000003423(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003424(a1)):a9=oo4829;;}}else 
if(s_cbb<oo4509){if(s_cbb<oo4510){s_cbb<oo4511?s_cbb<oo4512?s_cbb<oo4513?(a1=cbb0000004812(a2),a2=cbb0000004813(a1),a1=a2==a1,a4=cbb0000004814(a1),a2=cbb0000004815([])):(a1=cbb0000004604(a2),a2=cbb0000004605(a1),a1=a1/a2,a3=cbb0000004606(a1),a1=cbb0000004607(a3),a2=cbb0000004608(a1),a2.push(a1),a1=cbb0000004609(a2)):s_cbb<oo4514?(a1=cbb0000006079(a3),a2=cbb0000006080(a1),a1=a1<a2,a1=cbb0000006081(a1),a1=cbb0000006082(a6),a2=cbb0000006083(a7),a1=a1|a2,a8=cbb0000006084(a1)):(function(){a1=shuz[start++];a3=cbb0000001728(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001729(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001730(a4);}(),a1=cbb0000001731(a6),a2=cbb0000001732(a7),a1=a1|a2,a8=cbb0000001733(a1)):s_cbb<oo4518?s_cbb<oo4519?(a1=cbb000000928(a5),a2=cbb000000929(a9),a1=a2<a1,a6=cbb000000930(a1),a1=cbb000000931(a5),a2=cbb000000932(a6),a1=a2!==a1,a1=cbb000000933(a1)):(a1=cbb0000006418(a2),a2=cbb0000006419(a1),a1=a1/a2,a3=cbb0000006420(a1),a1=cbb0000006421(a2),a2=cbb0000006422(a1),a1=a2==a1,a4=cbb0000006423(a1)):(a1=cbb0000007834(a6),a2=cbb0000007835(a7),a1=a1|a2,a8=cbb0000007836(a1),a1=cbb0000007837(a4),a2=cbb0000007838(a5),a1=a1-a2,a6=cbb0000007839(a1));}else if(s_cbb<oo4520){if(s_cbb<oo4521){if(s_cbb<oo4522){a1=cbb0000006977(a9);a3=shuz[start++];a1[constantPool[a3]]+=oo4828;a2=cbb0000006978([]);}else{a1=cbb0000007060(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;a5=cbb0000007061(a3);a4=cbb0000007062(!a5);}}else 
s_cbb<oo4525?(function(){debugger;}(),a1=cbb0000004346(a2),a2=cbb0000004347(a3),a3=cbb0000004348(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004349(a3)):(a1=cbb0000002294(a2),a2=cbb0000002295(a1),a1=a2-a1,a1=cbb0000002296(a1),a5=cbb0000002297(a3),a4=cbb0000002298(!a5));}else s_cbb<oo4526?s_cbb<oo4527?(a1=cbb0000002824(a6),a2=cbb0000002825(a1),a1=a1>>>a2,a2=cbb0000002826(a1),a1=cbb0000002827(a3),a2=cbb0000002828(a1),a3=delete a2[a1],a1=cbb0000002829(a3)):(a1=cbb0000007740(a3),a2=cbb0000007741(a4),a1=a1^a2,a5=cbb0000007742(a1),a1=cbb0000007743(a4),a2=cbb0000007744(a5),a1=a1-a2,a6=cbb0000007745(a1)):(a1=cbb0000004759(a2),a2=cbb0000004760(a1),a1=a2==a1,a4=cbb0000004761(a1),a1=cbb0000004762(a4),a2=cbb0000004763(a5),a1=a1-a2,a6=cbb0000004764(a1));}else s_cbb<oo4528?s_cbb<oo4529?s_cbb<oo4530?s_cbb<oo4531?(a1=cbb0000004232(a3),a2=cbb0000004233(a1),a1=a1<a2,a1=cbb0000004234(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004235(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004236(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004237(a4);}()):(a1=cbb0000007049(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a1=cbb0000007050(a2),a2=cbb0000007051(a3),a1=a1&a2,a3=cbb0000007052(a1)):s_cbb<oo4540?(a5=cbb0000003771(a3),a1=cbb0000003772(typeof 
a5),a1=cbb0000003773(a3),a2=cbb0000003774(a4),a1=a1>>a2,a5=cbb0000003775(a1)):(a1=cbb0000001845(a2),a2=cbb0000001846(a1),a1=a2-a1,a1=cbb0000001847(a1),function(){a1=shuz[start++];a3=cbb0000001848(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000001849(a6));}a3==window.setTimeout?a4=setTimeout(...args):a3==window.atob?a4=atob(...args):a3==window.clearInterval?a4=clearInterval(...args):a3==window.setInterval?a4=setInterval(...args):a3==window.RegExp?a4=RegExp(...args):a3==window.alert?a4=alert(...args):a4=a3.apply(all,args);a2=cbb0000001850(a4);}()):s_cbb<oo4544?s_cbb<oo4545?(a1=cbb0000004622({}),a1=cbb0000004623(a3),a2=cbb0000004624(a1),a2.push(a1),a1=cbb0000004625(a2)):(function(){a1=cbb0000001681(a2);a2=cbb0000001682(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001683(a1);}(),a1=cbb0000001684(a3),a2=cbb0000001685(a3),a1=a1>a2,a2=cbb0000001686(a1)):(a1=cbb0000003162(a5),a2=cbb0000003163(a6),a1=a2!==a1,a1=cbb0000003164(a1),a1=cbb0000003165(a2),a2=cbb0000003166(a1),a1=a2===a1,a3=cbb0000003167(a1)):s_cbb<oo4546?s_cbb<oo4547?s_cbb<oo4548?(function(){a1=cbb0000002431(a2);throw a1;}(),a1=cbb0000002432({})):(a1=shuz[start++],start+=a1,a1=cbb0000001508(a6),a2=cbb0000001509(a1),a1=a1>>>a2,a2=cbb0000001510(a1)):s_cbb<oo4549?(a1=cbb0000002869(a3),a2=cbb0000002870(a4),a1=a1>>a2,a5=cbb0000002871(a1),a1=cbb0000002872(a3),a2=cbb0000002873(a1),a3=delete a2[a1],a1=cbb0000002874(a3)):(a1=cbb0000002422(a2),a2=cbb0000002423(a3),a1=a1&a2,a3=cbb0000002424(a1),a5=cbb0000002425(a3),a4=cbb0000002426(!a5)):s_cbb<oo4550?s_cbb<oo4551?(a1=cbb0000004956(a3),a2=cbb0000004957(a3),a1=a1>a2,a2=cbb0000004958(a1),a1=cbb0000004959(a4),a2=cbb0000004960(a1),a1=a1>=a2,a2=cbb0000004961(a1)):(a1=cbb0000003284(a3),a2=cbb0000003285(a2),a1=a1<<a2,a1=cbb0000003286(a1),a1=cbb0000003287(a3),a3=shuz[start++],a1[constantPool[a3]]-=oo4828):(a5=cbb0000006582(a3),a4=cbb0000006583(!a5),a1=cbb0000006584(a3),a2=cbb0000006585(a4),a1=a1^a2,a5=cbb0000006586(a1));}else 
if(s_cbb<oo4553){if(s_cbb<oo4554){if(s_cbb<oo4555){s_cbb<oo4556?s_cbb<oo4557?s_cbb<oo4558?(a1=cbb0000005175(a4),a2=cbb0000005176(a1),a1=a1>=a2,a2=cbb0000005177(a1),a1=cbb0000005178(a4),a2=cbb0000005179(a5),a1=a1-a2,a6=cbb0000005180(a1)):(a1=cbb0000005864(a3),a2=cbb0000005865(a1),a1=a1<a2,a1=cbb0000005866(a1),a1=cbb0000005867(a3),a2=cbb0000005868(a4),a1=a1>>a2,a5=cbb0000005869(a1)):s_cbb<oo4559?(all=cbbb,a1=cbb0000007378(cbbb),a2=cbb0000007379([])):(a1=cbb0000006184(a3),a2=cbb0000006185(a2),a1=a1<<a2,a1=cbb0000006186(a1),a1=cbb0000006187(a2),a2=cbb0000006188(a3),a1=a1&a2,a3=cbb0000006189(a1)):s_cbb<oo4560?s_cbb<oo4561?(a5=cbb000000117(a1),a2=cbb000000118(void a5),a1=cbb000000119(a3),a2=cbb000000120(a1),a1=a1 in a2,a1=cbb000000121(a1)):(a1=cbb0000004920(a3),a2=cbb0000004921(a3),a1=a1>a2,a2=cbb0000004922(a1),a1=cbb0000004923(a3),a2=cbb0000004924(a2),a1=a1<<a2,a1=cbb0000004925(a1)):(a1=cbb0000007209(a3),a2=cbb0000007210(a1),a1=a1<a2,a1=cbb0000007211(a1),a1=cbb0000007212(a3),a2=cbb0000007213(a4),a1=a1%a2,a5=cbb0000007214(a1));}else if(s_cbb<oo4562){s_cbb<oo4563?s_cbb<oo4564?(a1=cbb0000004827(a2),a2=cbb0000004828(a1),a1=a2-a1,a1=cbb0000004829(a1),a1=cbb0000004830(a2),a2=cbb0000004831(a1),a1=a2==a1,a4=cbb0000004832(a1)):(a1=cbb0000006540(a2),a2=cbb0000006541(a1),a1=a1/a2,a3=cbb0000006542(a1),a1=cbb0000006543(a3),a2=cbb0000006544(a4),a1=a1^a2,a5=cbb0000006545(a1)):s_cbb<oo4565?(a1=cbb0000007828(a6),a2=cbb0000007829(a7),a1=a1|a2,a8=cbb0000007830(a1),a1=cbb0000007831(a4),a2=cbb0000007832(a5),a1=a1-a2,a6=cbb0000007833(a1)):(a1=cbb0000004822(a2),a2=cbb0000004823(a1),a1=a2==a1,a4=cbb0000004824(a1),a5=cbb0000004825(a3),a4=cbb0000004826(!a5));}else if(s_cbb<oo4566){s_cbb<oo4567?(a1=cbb0000002016(a2),a2=cbb0000002017(a1),a1=a1/a2,a3=cbb0000002018(a1),a1=cbb0000002019({})):(a5=cbb0000006314(a1),a2=cbb0000006315(void 
a5),a1=cbb0000006316(a2),a2=cbb0000006317(a1),a1=a1/a2,a3=cbb0000006318(a1));}else{a1=cbb0000003469(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003470(a1)):a9=oo4829;;a1=cbb0000003471(a6);a2=cbb0000003472(a1);a1=a1>>>a2;a2=cbb0000003473(a1);}}else if(s_cbb<oo4569){s_cbb<oo4570?s_cbb<oo4571?s_cbb<oo4572?(a1=cbb0000004847(a2),a2=cbb0000004848(a1),a1=a2==a1,a4=cbb0000004849(a1),a1=cbb0000004850(a3),a2=cbb0000004851(a2),a1=a1<<a2,a1=cbb0000004852(a1)):(a2=cbb0000002976([]),a1=cbb0000002977(a5),a2=cbb0000002978(a6),a1=a2!==a1,a1=cbb0000002979(a1)):s_cbb<oo4573?(a1=cbb0000006926(a5),a2=cbb0000006927(a6),a1=a2*a1,a7=cbb0000006928(a1),a1=cbb0000006929(a5),a2=cbb0000006930(a6),a1=a2*a1,a7=cbb0000006931(a1)):(a1=cbb0000007252(a3),a2=cbb0000007253(a1),a1=a1<a2,a1=cbb0000007254(a1),a1=cbb0000007255(a2),a2=cbb0000007256(a1),a1=a1+a2,a1=cbb0000007257(a1)):s_cbb<oo4574?s_cbb<oo4575?(a1=cbb0000005261(cbbb),a2=cbb0000005262(a1),a1=a1<=a2,a5=cbb0000005263(a1),a5=cbb0000005264(a1),a2=cbb0000005265(void a5)):(a1=cbb0000003184(a2),a2=cbb0000003185(a1),a1=a2===a1,a3=cbb0000003186(a1),a1=cbb0000003187(a6),a2=cbb0000003188(a7),a1=a1|a2,a8=cbb0000003189(a1)):(a1=cbb0000006069(a6),a2=cbb0000006070(a7),a1=a1|a2,a8=cbb0000006071(a1),a1=cbb0000006072(a4),a2=cbb0000006073(a5),a1=a1-a2,a6=cbb0000006074(a1));}else if(s_cbb<oo4576){if(s_cbb<oo4577){if(s_cbb<oo4578){a2=cbb000000724(allthis);a1=cbb000000725(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000726(a1)):a9=oo4829;;}else{a2=cbb000000882(allthis);a1=cbb000000883(a5);a2=cbb000000884(a9);a1=a2<a1;a6=cbb000000885(a1);}}else if(s_cbb<oo4580){(function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004191(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004192(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new 
a3(...args);offnew=oo4806;a1=cbb0000004193(a4);})();a1=cbb0000004194(a2);a2=cbb0000004195(a3);a1=a1&a2;a3=cbb0000004196(a1);}else{a1=cbb000000764(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000765(a1)):a9=oo4829;;a1=cbb000000766(a5);a2=cbb000000767(a9);a1=a2<a1;a6=cbb000000768(a1);}}else s_cbb<oo4589?s_cbb<oo4590?(a1=cbb0000007804(a4),a2=cbb0000007805(a5),a1=a1-a2,a6=cbb0000007806(a1),a1=cbb0000007807(a6),a2=cbb0000007808(a7),a1=a1|a2,a8=cbb0000007809(a1)):(a1=cbb0000005237(a5),a2=cbb0000005238(a6),a1=a2!==a1,a1=cbb0000005239(a1),a1=cbb0000005240(a4),a2=cbb0000005241(a1),a1=a1>=a2,a2=cbb0000005242(a1)):(a1=cbb0000002074({}),a2=cbb0000002075(allthis));}else if(s_cbb<oo4591){if(s_cbb<oo4592){if(s_cbb<oo4593){if(s_cbb<oo4594){if(s_cbb<oo4595){a1=cbb0000003168(a2);a2=cbb0000003169(a1);a1=a2===a1;a3=cbb0000003170(a1);a1=cbb0000003171(cbbb);a2=cbb0000003172(a1);a1=a1<=a2;a5=cbb0000003173(a1);}else{a1=cbb0000007102(a2);a2=cbb0000007103(a1);a1=a1+a2;a1=cbb0000007104(a1);a1=cbb0000007105(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}}else s_cbb<oo4597?(a1=cbb00000010(a3),a2=cbb00000011(a1),a1=a1 in a2,a1=cbb00000012(a1),a1=cbb00000013(a2),a2=cbb00000014(a1),a1=a1+a2,a1=cbb00000015(a1)):(a1=cbb0000007599(a2),a2=cbb0000007600(a1),a1=a1+a2,a1=cbb0000007601(a1),a2=cbb0000007602([]));}else s_cbb<oo4598?s_cbb<oo4599?(a1=cbb0000003236(a2),a2=cbb0000003237(a1),a1=a1/a2,a3=cbb0000003238(a1),a1=cbb0000003239(a2),a2=cbb0000003240(a1),a1=a2===a1,a3=cbb0000003241(a1)):(a1=cbb0000007580(a2),a2=cbb0000007581(a1),a1=a1+a2,a1=cbb0000007582(a1),function(){debugger;}()):(function(){debugger;}(),a1=shuz[start++],a2=cbb0000008038(constantPool[a1]));}else if(s_cbb<oo4600){s_cbb<oo4601?s_cbb<oo4602?(a1=cbb0000007633(a2),a2=cbb0000007634(a1),a1=a2===a1,a3=cbb0000007635(a1),a1=cbb0000007636(a2),a2=cbb0000007637(a1),a1=a1+a2,a1=cbb0000007638(a1)):(a5=cbb0000002657(a1),a2=cbb0000002658(void 
a5),a1=cbb0000002659(a5),a2=cbb0000002660(a6),a1=a2!==a1,a1=cbb0000002661(a1)):s_cbb<oo4603?(a1=cbb0000006902(a5),a2=cbb0000006903(a6),a1=a2*a1,a7=cbb0000006904(a1),a1=cbb0000006905(a3),a2=cbb0000006906(a4),a1=a1%a2,a5=cbb0000006907(a1)):(a1=cbb0000006272(a2),a2=cbb0000006273(a1),a1=a1/a2,a3=cbb0000006274(a1),a1=cbb0000006275(a5),a2=cbb0000006276(a6),a1=a2!==a1,a1=cbb0000006277(a1));}else if(s_cbb<oo4604){if(s_cbb<oo4605){a1=cbb000000682(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000683(a1)):a9=oo4829;;a5=cbb000000684(a1);a2=cbb000000685(void a5);}else{a1=cbb0000007125(a3);a2=cbb0000007126(a4);a1=a1^a2;a5=cbb0000007127(a1);a1=cbb0000007128(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}}else{a1=cbb0000003435(a3);a2=cbb0000003436(a3);a1=a1>a2;a2=cbb0000003437(a1);a1=cbb0000003438(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003439(a1)):a9=oo4829;;}}else if(s_cbb<oo4609){s_cbb<oo4610?s_cbb<oo4611?s_cbb<oo4612?(a1=shuz[start++],a2=cbb0000008107(constantPool[a1]),a1=cbb0000008108(a2),a2=cbb0000008109(a3),a1=a1&a2,a3=cbb0000008110(a1)):(a1=cbb0000003688(a6),a2=cbb0000003689(a7),a1=a1|a2,a8=cbb0000003690(a1),a5=cbb0000003691(a3),a1=cbb0000003692(typeof a5)):s_cbb<oo4613?(a1=cbb0000006979(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a1=cbb0000006980(a2),a2=cbb0000006981(a1),a1=a2==a1,a4=cbb0000006982(a1)):(function(){i=[];a2=cbb0000002593(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002594(a5),a2=cbb0000002595(a6),a1=a2!==a1,a1=cbb0000002596(a1)):s_cbb<oo4615?s_cbb<oo4616?(a1=cbb0000002875(a3),a2=cbb0000002876(a1),a3=delete a2[a1],a1=cbb0000002877(a3),a1=cbb0000002878(a3),a2=cbb0000002879(a4),a1=a1%a2,a5=cbb0000002880(a1)):(a1=cbb0000006143(a3),a2=cbb0000006144(a2),a1=a1<<a2,a1=cbb0000006145(a1),a1=cbb0000006146(a4),a2=cbb0000006147(a1),a1=a1>=a2,a2=cbb0000006148(a1)):(a1=cbb0000007968(),a2=cbb0000007969(),a1=a2!=a1,cbb0000007970(a1),a1=shuz[start++],a2=cbb0000007971(a1));}else 
if(s_cbb<oo4617){if(s_cbb<oo4618){s_cbb<oo4619?(a1=cbb000000195(),a2=cbb000000196(),a1=a2!=a1,cbb000000197(a1),a1=cbb000000198(a5),a2=cbb000000199(a9),a1=a2<a1,a6=cbb000000200(a1)):(a1=cbb0000005543(a2),a2=cbb0000005544(a3),a1=a1&a2,a3=cbb0000005545(a1),a1=cbb0000005546(),a2=cbb0000005547(),a1=a2!=a1,cbb0000005548(a1));}else{a1=cbb0000007110(a4);a2=cbb0000007111(a5);a1=a1-a2;a6=cbb0000007112(a1);a1=cbb0000007113(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}}else s_cbb<oo4621?s_cbb<oo4622?(a1=cbb0000006558(a5),a2=cbb0000006559(a6),a1=a2!==a1,a1=cbb0000006560(a1),a1=cbb0000006561(a3),a2=cbb0000006562(a4),a1=a1^a2,a5=cbb0000006563(a1)):(a1=cbb0000006885(a5),a2=cbb0000006886(a6),a1=a2*a1,a7=cbb0000006887(a1),a1=cbb0000006888(a3),a2=cbb0000006889(a4),a1=a1^a2,a5=cbb0000006890(a1)):(a1=cbb0000004167(a6),a2=cbb0000004168(a1),a1=a1>>>a2,a2=cbb0000004169(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004170(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004171(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004172(a4);}());}else if(s_cbb<oo4630){if(s_cbb<oo4631){if(s_cbb<oo4632){if(s_cbb<oo4633){if(s_cbb<oo4634){if(s_cbb<oo4635){if(s_cbb<oo4636){a1=cbb0000003520(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003521(a1)):a9=oo4829;;a1=cbb0000003522();a2=cbb0000003523();a1=a2!=a1;cbb0000003524(a1);}else{a5=cbb0000002289(a3);a4=cbb0000002290(!a5);a1=cbb0000002291(a3);a2=cbb0000002292(a3);a1=a1>a2;a2=cbb0000002293(a1);}}else s_cbb<oo4638?(function(){a1=cbb0000001557(a2);a2=cbb0000001558(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001559(a1);}(),a1=cbb0000001560(a4),a2=cbb0000001561(a5),a1=a1-a2,a6=cbb0000001562(a1)):(a1=cbb0000001304(a3),a2=cbb0000001305(a2),a1=a1<<a2,a1=cbb0000001306(a1),function(){debugger;}());}else 
s_cbb<oo4639?s_cbb<oo4640?(a1=cbb0000005621(a5),a2=cbb0000005622(a9),a1=a2<a1,a6=cbb0000005623(a1),a1=cbb0000005624(a6),a2=cbb0000005625(a1),a1=a1>>>a2,a2=cbb0000005626(a1)):(a2=cbb0000004569([]),a5=cbb0000004570(a3),a1=cbb0000004571(~a5)):(a2=cbb0000001975([]),a1=cbb0000001976(cbbb),a2=cbb0000001977(a1),a1=a1<=a2,a5=cbb0000001978(a1));}else s_cbb<oo4641?s_cbb<oo4642?s_cbb<oo4643?(a2=cbb0000003659(allthis),a5=cbb0000003660(a2),a2=cbb0000003661(-a5)):(a1=cbb0000002830(a4),a2=cbb0000002831(a5),a1=a1-a2,a6=cbb0000002832(a1),a1=cbb0000002833(a3),a2=cbb0000002834(a1),a3=delete a2[a1],a1=cbb0000002835(a3)):s_cbb<oo4644?(a1=cbb0000006938(a5),a2=cbb0000006939(a6),a1=a2*a1,a7=cbb0000006940(a1),a2=cbb0000006941(allthis)):(function(){a1=cbb0000001599(a2);a2=cbb0000001600(a3);try{a1=a2[a1];}catch(e){a1=window[a1];}all=a2;a1=cbb0000001601(a1);}(),a1=cbb0000001602(a2),a2=cbb0000001603(a1),a1=a2-a1,a1=cbb0000001604(a1)):s_cbb<oo4645?s_cbb<oo4646?(a1=cbb0000007524(a2),a2=cbb0000007525(a1),a1=a1/a2,a3=cbb0000007526(a1),a1=shuz[start++],a2=cbb0000007527(a3),a3=cbb0000007528(a1),a2[constantPool[a1]]=a3):(a5=cbb0000006783(a1),a2=cbb0000006784(void a5),a1=cbb0000006785(a5),a2=cbb0000006786(a6),a1=a2*a1,a7=cbb0000006787(a1)):(all=cbbb,a1=cbb0000007426(cbbb),function(){debugger;}());}else s_cbb<oo4647?s_cbb<oo4648?s_cbb<oo4649?s_cbb<oo4650?(a1=cbb0000007651(a3),a2=cbb0000007652(a3),a1=a1>a2,a2=cbb0000007653(a1),a1=cbb0000007654(a2),a2=cbb0000007655(a1),a1=a1+a2,a1=cbb0000007656(a1)):(a1=cbb0000002806(a2),a2=cbb0000002807(a1),a1=a2-a1,a1=cbb0000002808(a1),a1=cbb0000002809(a3),a2=cbb0000002810(a1),a3=delete a2[a1],a1=cbb0000002811(a3)):s_cbb<oo4651?(function(){a1=cbb0000002497(a2);throw a1;}(),function(){debugger;}()):(a5=cbb0000002675(a1),a2=cbb0000002676(void 
a5),a1=cbb0000002677(a2),a2=cbb0000002678(a1),a1=a1/a2,a3=cbb0000002679(a1)):s_cbb<oo4652?s_cbb<oo4653?(a1=cbb000000288(a6),a2=cbb000000289(a7),a1=a1|a2,a8=cbb000000290(a1),a1=cbb000000291(),a2=cbb000000292(),a1=a2!=a1,cbb000000293(a1)):(function(){i=[];a2=cbb0000002567(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}(),a1=cbb0000002568(a3),a2=cbb0000002569(a4),a1=a1%a2,a5=cbb0000002570(a1)):(a1=cbb0000007816(a4),a2=cbb0000007817(a5),a1=a1-a2,a6=cbb0000007818(a1),a1=cbb0000007819(a6),a2=cbb0000007820(a1),a1=a1>>>a2,a2=cbb0000007821(a1)):s_cbb<oo4654?s_cbb<oo4655?s_cbb<oo4656?(a5=cbb0000003985(a1),a2=cbb0000003986(void a5),a2=cbb0000003987(allthis)):(a1=cbb0000002901(a3),a2=cbb0000002902(a1),a3=delete a2[a1],a1=cbb0000002903(a3),a1=cbb0000002904(a6),a2=cbb0000002905(a1),a1=a1>>>a2,a2=cbb0000002906(a1)):s_cbb<oo4657?(a5=cbb0000005767(a1),a2=cbb0000005768(void a5),a1=cbb0000005769(a3),a2=cbb0000005770(a4),a1=a1>>a2,a5=cbb0000005771(a1)):(a1=cbb0000006987(a3),a2=cbb0000006988(a3),a1=a1>a2,a2=cbb0000006989(a1),a1=cbb0000006990(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828):s_cbb<oo4659?s_cbb<oo4660?(a1=shuz[start++],start+=a1,a1=cbb0000001539(a2),a2=cbb0000001540(a1),a1=a2-a1,a1=cbb0000001541(a1)):(a1=cbb0000004944(),a2=cbb0000004945(),a1=a2!=a1,cbb0000004946(a1),a1=cbb0000004947(a3),a2=cbb0000004948(a3),a1=a1>a2,a2=cbb0000004949(a1)):(a5=cbb0000003604(a2),a2=cbb0000003605(-a5),a1=cbb0000003606(a5),a2=cbb0000003607(a9),a1=a2<a1,a6=cbb0000003608(a1));}else if(s_cbb<oo4661){if(s_cbb<oo4662){if(s_cbb<oo4663){if(s_cbb<oo4664){s_cbb<oo4665?(a1=cbb0000005801(a3),a2=cbb0000005802(a4),a1=a1>>a2,a5=cbb0000005803(a1),a5=cbb0000005804(a1),a2=cbb0000005805(void a5)):(a1=cbb0000003254(a2),a2=cbb0000003255(a1),a1=a2===a1,a3=cbb0000003256(a1),a1=cbb0000003257(a3),a2=cbb0000003258(a4),a1=a1%a2,a5=cbb0000003259(a1));}else 
if(s_cbb<oo4666){a1=cbb0000007106(a3);a2=cbb0000007107(a1);a1=a1<a2;a1=cbb0000007108(a1);a1=cbb0000007109(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}else{(function(){debugger;})();a1=cbb0000001354(a4);a2=cbb0000001355(a5);a1=a1-a2;a6=cbb0000001356(a1);}}else s_cbb<oo4668?s_cbb<oo4669?(a1=cbb0000004085(a6),a2=cbb0000004086(a1),a1=a1>>>a2,a2=cbb0000004087(a1),a2=cbb0000004088(allthis)):(a2=cbb0000001891(allthis),a2=cbb0000001892([])):(a1=cbb0000004816(a2),a2=cbb0000004817(a1),a1=a2==a1,a4=cbb0000004818(a1),a1=cbb0000004819(a2),a2=cbb0000004820(a1),a1=a2==a1,a4=cbb0000004821(a1));}else if(s_cbb<oo4670){if(s_cbb<oo4671){if(s_cbb<oo4672){a1=cbb0000001059(a5);a2=cbb0000001060(a6);a1=a2*a1;a7=cbb0000001061(a1);return;}else{a1=cbb0000006723(a3);a2=cbb0000006724(a4);a1=a1%a2;a5=cbb0000006725(a1);a1=cbb0000006726(a3);a2=cbb0000006727(a4);a1=a1^a2;a5=cbb0000006728(a1);}}else s_cbb<oo4673?(a1=cbb0000001414(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a1=cbb0000001415(a3),a2=cbb0000001416(a3),a1=a1>a2,a2=cbb0000001417(a1)):(a5=cbb0000004524(a3),a1=cbb0000004525(~a5),a1=cbb0000004526(a5),a2=cbb0000004527(a9),a1=a2<a1,a6=cbb0000004528(a1));}else if(s_cbb<oo4675){s_cbb<oo4676?(a1=cbb0000003937(a4),a2=cbb0000003938(a1),a1=a1>=a2,a2=cbb0000003939(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003940()):a4.splice(oo4806,oo4806,cbb0000003941());}a1=cbb0000003942(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):(a1=cbb0000001453(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820,a1=cbb0000001454(a2),a2=cbb0000001455(a3),a1=a1&a2,a3=cbb0000001456(a1));}else{a1=cbb0000003502(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003503(a1)):a9=oo4829;;a1=cbb0000003504({});}}else 
if(s_cbb<oo4688){if(s_cbb<oo4689){if(s_cbb<oo4690){if(s_cbb<oo4691){a1=cbb0000007098(a2);a2=cbb0000007099(a1);a1=a2-a1;a1=cbb0000007100(a1);a1=cbb0000007101(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}else{a1=cbb0000008337(a3);a2=cbb0000008338(a2);a1=a1<<a2;a1=cbb0000008339(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008340(a1);let g=a7;all[g]=function(){let g2=new cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else if(s_cbb<oo4704){a1=cbb0000004018(a4);a2=cbb0000004019(a5);a1=a1-a2;a6=cbb0000004020(a1);a2=cbb0000004021(allthis);}else{a1=cbb0000008395(a3);a2=cbb0000008396(a2);a1=a1<<a2;a1=cbb0000008397(a1);a8=duei.CFf;for(a1=oo4806;a1<a8;a1++){a7=cbb0000008398(a1);let g=a7;all[g]=function(){let g2=new 
cshduei();offnew==oo4828?(offnew=oo4806,a9={"variablePool":{},"arguments":arguments,"zhili":[]},a9.__proto__=cbbb,cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this)):(a9={"variablePool":{},"arguments":arguments,"zhili":[]},cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool),cltothis(a9,a9['variablePool'],oo4828),cltothis(a9['zhili'],changlc[cbbb.variablePool[g]].zhili),a9.__proto__=cbbb,a6=cbb_jsvmp(a9,g2,oo4806,a9['zhili'],arguments,this));if(g2.CFf==oo4806){return undefined;}else{let h=g2.cf(a1);for(;oo4828==oo4828;){if(g2.CFf==oo4806){break;}else{a9=g2.cf(a1);}}return h;}};}}}else s_cbb<oo4716?s_cbb<oo4717?(a1=cbb000000318(),a2=cbb000000319(),a1=a2!=a1,cbb000000320(a1),a1=cbb000000321(a2),a2=cbb000000322(a1),a1=a2-a1,a1=cbb000000323(a1)):(a1=cbb0000003174(a3),a2=cbb0000003175(a4),a1=a1^a2,a5=cbb0000003176(a1),a1=cbb0000003177(a2),a2=cbb0000003178(a1),a1=a2===a1,a3=cbb0000003179(a1)):(a1=cbb0000005709(a5),a2=cbb0000005710(a6),a1=a2*a1,a7=cbb0000005711(a1),a1=cbb0000005712(a6),a2=cbb0000005713(a1),a1=a1>>>a2,a2=cbb0000005714(a1));}else s_cbb<oo4718?s_cbb<oo4719?s_cbb<oo4720?(a1=cbb0000004877(a5),a2=cbb0000004878(a6),a1=a2*a1,a7=cbb0000004879(a1),a1=cbb0000004880(a2),a2=cbb0000004881(a1),a1=a2==a1,a4=cbb0000004882(a1)):(a1=cbb0000006846(a4),a2=cbb0000006847(a5),a1=a1-a2,a6=cbb0000006848(a1),a1=cbb0000006849(a5),a2=cbb0000006850(a6),a1=a2*a1,a7=cbb0000006851(a1)):(a1=cbb000000904(a5),a2=cbb000000905(a9),a1=a2<a1,a6=cbb000000906(a1),a1=cbb000000907(a4),a2=cbb000000908(a1),a1=a1>=a2,a2=cbb000000909(a1)):s_cbb<oo4721?s_cbb<oo4722?(a1=cbb0000002848(a2),a2=cbb0000002849(a1),a1=a2===a1,a3=cbb0000002850(a1),a1=cbb0000002851(a3),a2=cbb0000002852(a1),a3=delete 
a2[a1],a1=cbb0000002853(a3)):(a1=cbb0000001182(a2),a2=cbb0000001183(a1),a1=a1+a2,a1=cbb0000001184(a1),j=cbb0000001185(a1),j2=cbb0000001186(a2),j2.variablePool!=undefined?getproto(j2,j,a7):j2[j]=a1):(a1=cbb0000004101(),a2=cbb0000004102(),a1=a2!=a1,cbb0000004103(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004104(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004105(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004106(a4);}());}else if(s_cbb<oo4730){if(s_cbb<oo4731){if(s_cbb<oo4732){if(s_cbb<oo4733){s_cbb<oo4734?s_cbb<oo4735?(a1=cbb0000004616(a4),a2=cbb0000004617(a5),a1=a1-a2,a6=cbb0000004618(a1),a1=cbb0000004619(a3),a2=cbb0000004620(a1),a2.push(a1),a1=cbb0000004621(a2)):(a1=cbb0000002044(a3),a2=cbb0000002045(a3),a1=a1>a2,a2=cbb0000002046(a1),a1=cbb0000002047({})):s_cbb<oo4736?(a1=cbb0000008333(a4),a2=cbb0000008334(a1),a1=a1>=a2,a2=cbb0000008335(a1),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008336(a2);cbbb[a9]=argsList[a2];}}()):(a1=cbb0000003146(a6),a2=cbb0000003147(a1),a1=a1>>>a2,a2=cbb0000003148(a1),a1=cbb0000003149(a2),a2=cbb0000003150(a1),a1=a2===a1,a3=cbb0000003151(a1));}else if(s_cbb<oo4738){if(s_cbb<oo4739){a1=cbb0000007380(a5);a2=cbb0000007381(a6);a1=a2!==a1;a1=cbb0000007382(a1);all=cbbb;a1=cbb0000007383(cbbb);}else{a1=cbb0000003525(a6);a2=cbb0000003526(a1);a1=a1>>>a2;a2=cbb0000003527(a1);a1=cbb0000003528(a3);a2=shuz[start++];!a1?(start+=a2,a4=cbb0000003529(a1)):a9=oo4829;;}}else{a1=cbb0000001418(a2);a3=shuz[start++];a1?start+=a3:a9=oo4820;a1=cbb0000001419(a4);a2=cbb0000001420(a5);a1=a1-a2;a6=cbb0000001421(a1);}}else 
s_cbb<oo4742?s_cbb<oo4743?s_cbb<oo4744?(a1=cbb0000006995(a9),a3=shuz[start++],a1[constantPool[a3]]+=oo4828,a1=cbb0000006996(a3),a2=cbb0000006997(a4),a1=a1^a2,a5=cbb0000006998(a1)):(a1=shuz[start++],a2=cbb0000008067(constantPool[a1]),a2=cbb0000008068(allthis)):s_cbb<oo4746?(a1=cbb0000006699(cbbb),a2=cbb0000006700(a1),a1=a1<=a2,a5=cbb0000006701(a1),a1=cbb0000006702(a3),a2=cbb0000006703(a4),a1=a1%a2,a5=cbb0000006704(a1)):(a1=cbb0000002134(a3),a2=cbb0000002135(a4),a3=cbb0000002136(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002137(a6),a2=cbb0000002138(a1),a1=a1>>>a2,a2=cbb0000002139(a1)):s_cbb<oo4747?s_cbb<oo4748?(a1=cbb0000006103(a3),a2=cbb0000006104(a2),a1=a1<<a2,a1=cbb0000006105(a1),a1=cbb0000006106(a2),a2=cbb0000006107(a1),a1=a2===a1,a3=cbb0000006108(a1)):(a5=cbb0000008318(a1),a2=cbb0000008319(void a5),function(){a1=duei.CFf;for(a2=oo4806;a2<a1;a2++){a9=cbb0000008320(a2);cbbb[a9]=argsList[a2];}}()):(a5=cbb0000004470(a3),a1=cbb0000004471(~a5),a1=cbb0000004472(a6),a2=cbb0000004473(a1),a1=a1>>>a2,a2=cbb0000004474(a1));}else if(s_cbb<oo4750){s_cbb<oo4751?s_cbb<oo4752?s_cbb<oo4753?(a1=cbb0000002812(a3),a2=cbb0000002813(a4),a1=a1%a2,a5=cbb0000002814(a1),a1=cbb0000002815(a3),a2=cbb0000002816(a1),a3=delete 
a2[a1],a1=cbb0000002817(a3)):(a1=cbb0000002110(a3),a2=cbb0000002111(a4),a3=cbb0000002112(a5),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000002113(a3),a2=cbb0000002114(a4),a1=a1>>a2,a5=cbb0000002115(a1)):s_cbb<oo4754?(a1=cbb0000006459(a2),a2=cbb0000006460(a1),a1=a1/a2,a3=cbb0000006461(a1),a1=cbb0000006462(a3),a2=cbb0000006463(a4),a1=a1^a2,a5=cbb0000006464(a1)):(a1=cbb0000001485(a4),a2=cbb0000001486(a1),a1=a1>=a2,a2=cbb0000001487(a1),a1=shuz[start++],start+=a1):s_cbb<oo4755?s_cbb<oo4756?(a1=cbb0000005561(a2),a2=cbb0000005562(a3),a1=a1&a2,a3=cbb0000005563(a1),a1=cbb0000005564(a2),a2=cbb0000005565(a1),a1=a2==a1,a4=cbb0000005566(a1)):(a1=cbb0000005567(a2),a2=cbb0000005568(a3),a1=a1&a2,a3=cbb0000005569(a1),a1=cbb0000005570(a3),a2=cbb0000005571(a4),a1=a1%a2,a5=cbb0000005572(a1)):(a1=cbb0000004214(a3),a2=cbb0000004215(a4),a1=a1>>a2,a5=cbb0000004216(a1),function(){cbbb['for_in_xh_cbb_list']=i;a1=shuz[start++];a3=cbb0000004217(a5);args=[];for(a2=oo4806;a2<a1;a2++){args.splice(oo4806,oo4806,cbb0000004218(a6));}offnew=oo4828;a3==RegExp?a4=new RegExp(args[oo4806],args[oo4828]):a4=new a3(...args);offnew=oo4806;a1=cbb0000004219(a4);}());}else if(s_cbb<oo4764){s_cbb<oo4765?s_cbb<oo4766?(a1=cbb0000002665(a3),a2=cbb0000002666(a4),a1=a1^a2,a5=cbb0000002667(a1),a5=cbb0000002668(a1),a2=cbb0000002669(void a5)):(a1=cbb0000003815(a2),a2=cbb0000003816(a1),a1=a2===a1,a3=cbb0000003817(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003818()):a4.splice(oo4806,oo4806,cbb0000003819());}a1=cbb0000003820(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else 
if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):s_cbb<oo4776?(a5=cbb0000007258(a3),a4=cbb0000007259(!a5),a1=cbb0000007260(a3),a2=cbb0000007261(a1),a1=a1<a2,a1=cbb0000007262(a1)):(a1=cbb000000433(a2),a2=cbb000000434(a1),a1=a2==a1,a4=cbb000000435(a1),a1=cbb000000436(a2),a2=cbb000000437(a1),a1=a2-a1,a1=cbb000000438(a1));}else if(s_cbb<oo4777){if(s_cbb<oo4778){a1=cbb0000001212(a5);a2=cbb0000001213(a6);a1=a2*a1;a7=cbb0000001214(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}else{a1=cbb0000002790({});a1=cbb0000002791(a3);a2=cbb0000002792(a1);a3=delete a2[a1];a1=cbb0000002793(a3);}}else{a1=cbb0000004408();a2=cbb0000004409();a1=a2!=a1;cbb0000004410(a1);a1=cbb0000004411(a2);a2=cbb0000004412(a3);a3=cbb0000004413(a4);a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1;a1=cbb0000004414(a3);}}else if(s_cbb<oo4785){s_cbb<oo4786?s_cbb<oo4787?s_cbb<oo4788?s_cbb<oo4789?(a1=cbb0000005806(a3),a2=cbb0000005807(a4),a1=a1>>a2,a5=cbb0000005808(a1),a1=cbb0000005809(a3),a2=cbb0000005810(a2),a1=a1<<a2,a1=cbb0000005811(a1)):(function(){debugger;}(),a1=cbb0000001351(a3),a2=cbb0000001352(a2),a1=a1<<a2,a1=cbb0000001353(a1)):s_cbb<oo4790?(a5=cbb0000003683(a3),a1=cbb0000003684(typeof 
a5),a1=cbb0000003685(a3),a2=cbb0000003686(a2),a1=a1<<a2,a1=cbb0000003687(a1)):(a1=cbb0000002512(),a2=cbb0000002513(),a1=a2!=a1,cbb0000002514(a1),function(){a1=cbb0000002515(a2);throw a1;}()):s_cbb<oo4791?s_cbb<oo4792?(function(){debugger;}(),a1=cbb0000001327(a3),a2=cbb0000001328(a1),a1=a1<a2,a1=cbb0000001329(a1)):(a1=cbb0000005970(a6),a2=cbb0000005971(a7),a1=a1|a2,a8=cbb0000005972(a1),a1=cbb0000005973(a3),a2=cbb0000005974(a4),a1=a1>>a2,a5=cbb0000005975(a1)):(a1=cbb000000134(),a2=cbb000000135(),a1=a2!=a1,cbb000000136(a1),a1=cbb000000137(a3),a2=cbb000000138(a1),a1=a1 in a2,a1=cbb000000139(a1)):s_cbb<oo4793?s_cbb<oo4794?s_cbb<oo4795?(function(){debugger;}(),a1=cbb0000006465(a3),a2=cbb0000006466(a4),a1=a1^a2,a5=cbb0000006467(a1)):(a1=cbb0000001929(a4),a2=cbb0000001930(a1),a1=a1>=a2,a2=cbb0000001931(a1),a2=cbb0000001932([])):s_cbb<oo4796?(a5=cbb000000934(a1),a2=cbb000000935(void a5),a1=cbb000000936(a5),a2=cbb000000937(a9),a1=a2<a1,a6=cbb000000938(a1)):(all=cbbb,a1=cbb0000007431(cbbb),a1=cbb0000007432(a2),a2=cbb0000007433(a1),a1=a1+a2,a1=cbb0000007434(a1)):s_cbb<oo4797?s_cbb<oo4798?(a1=cbb00000044({}),a1=cbb00000045(a3),a2=cbb00000046(a1),a1=a1 in a2,a1=cbb00000047(a1)):(a1=cbb0000003901(),a2=cbb0000003902(),a1=a2!=a1,cbb0000003903(a1),function(){a1=shuz[start++]*oo4801;a3=[];a4=[];for(a2=oo4806;a2<a1;a2++){a2<a1/oo4801?a3.splice(oo4806,oo4806,cbb0000003904()):a4.splice(oo4806,oo4806,cbb0000003905());}a1=cbb0000003906(a2);for(a2=oo4806;a2<a3.length;a2++){if(a4[a2]==a1){start+=a3[a2];break;}else if(a4[a2]==null){start+=a3[a2];break;}else{a9=oo4829;}}}()):(a1=cbb0000001398(a3),a2=cbb0000001399(a2),a1=a1<<a2,a1=cbb0000001400(a1),a1=cbb0000001401(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820);}else if(s_cbb<oo4809){if(s_cbb<oo4810){if(s_cbb<oo4811){if(s_cbb<oo4812){a1=cbb000000701(a5);a2=shuz[start++];a1?(start+=a2,a7=cbb000000702(a1)):a9=oo4829;;a1=cbb000000703(a2);a2=cbb000000704(a1);a1=a2-a1;a1=cbb000000705(a1);}else{(function(){i=[];a2=cbb0000002601(a2);for(a1 in 
a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;})();a1=cbb0000002602({});}}else s_cbb<oo4814?(a1=cbb0000004301(a2),a2=cbb0000004302(a3),a3=cbb0000004303(a4),a3.variablePool!=undefined?getproto(a3,a2,a1):a3[a2]=a1,a1=cbb0000004304(a3),a1=cbb0000004305(a2),a2=cbb0000004306(a1),a1=a2-a1,a1=cbb0000004307(a1)):(a1=shuz[start++],start+=a1,a2=cbb0000001496([]));}else s_cbb<oo4815?s_cbb<oo4816?(a1=cbb0000004002(cbbb),a2=cbb0000004003(a1),a1=a1<=a2,a5=cbb0000004004(a1),a2=cbb0000004005(allthis)):(a1=cbb0000007846(a4),a2=cbb0000007847(a5),a1=a1-a2,a6=cbb0000007848(a1),a1=cbb0000007849(a3),a2=cbb0000007850(a4),a1=a1%a2,a5=cbb0000007851(a1)):(a1=cbb0000002784(a3),a2=cbb0000002785(a1),a3=delete a2[a1],a1=cbb0000002786(a3),a1=cbb0000002787(a3),a2=cbb0000002788(a4),a1=a1>>a2,a5=cbb0000002789(a1));}else if(s_cbb<oo4817){s_cbb<oo4818?s_cbb<oo4819?(a1=cbb0000001465(a3),a2=cbb0000001466(a4),a1=a1%a2,a5=cbb0000001467(a1),a1=cbb0000001468(a2),a3=shuz[start++],a1?start+=a3:a9=oo4820):(a1=cbb0000002633(a5),a2=cbb0000002634(a6),a1=a2*a1,a7=cbb0000002635(a1),function(){i=[];a2=cbb0000002636(a2);for(a1 in a2){i.push(a1);}cbbb['for_in_xh_cbb_list']=i;}()):(a1=cbb0000001962(a5),a2=cbb0000001963(a6),a1=a2!==a1,a1=cbb0000001964(a1),a2=cbb0000001965([]));}else if(s_cbb<oo4821){if(s_cbb<oo4822){a1=cbb0000007151(a2);a2=cbb0000007152(a1);a1=a2-a1;a1=cbb0000007153(a1);a1=cbb0000007154(a6);a2=shuz[start++];!a1?start+=a2:a9=oo4829;;}else{a1=cbb0000001227(a2);a2=cbb0000001228(a1);a1=a2===a1;a3=cbb0000001229(a1);a2=shuz[start++];a3=shuz[start++];a4=shuz[start++];try{a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});start=a2+start;if(a6=="-90_cbb"){return a6;}}catch(e){a7=e;start=a2+start;a6=cbb_jsvmp(a1,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"args":args,"argsList":argsList,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return 
a6;}else{a9=oo4829;}}finally{if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=a3+start;a6=cbb_jsvmp(a3,start,start,duei,args.length,oo4828,{"shuz":shuz,"cbbb":cbbb,"allthis":allthis,"argsList":argsList,"args":args,"duei":duei,"all":all,"a7":a7});if(a6=="-90_cbb"){return a6;}else{a9=oo4829;}start=start+a4;}}}else{a1=cbb0000002463(a6);a2=cbb0000002464(a7);a1=a1|a2;a8=cbb0000002465(a1);(function(){a1=cbb0000002466(a2);throw a1;})();}}};})();window.offnew=0;window['variablePool']={};window['zhili']=[];cltothis(window['variablePool'],changlc.awcbb_yhh_fun0.variablePool);cltothis(window['zhili'],changlc.awcbb_yhh_fun0.zhili);cbb_jsvmp(window,new cshduei(),0,changlc.awcbb_yhh_fun0.zhili);
2833844911/cy_jsvmp
29,525
outsrc/out.js
(function(){ var cywindow = this; var codeOfmyfun = [10,18,1,23,10,6,1815,23,10,7,1815,10,0,23,22,8,23,10,8,181,23,10,6,181,10,9,181,24,25,240,10,0,23,22,10,23,10,10,181,23,10,7,181,10,11,28,24,25,47,23,10,1,23,10,10,181,290,23,10,1,181,23,10,12,23,10,10,181,290,23,10,12,181,1810,23,10,10,10,4,23,10,10,181,20,90,23,10,10,181,1810,23,10,12,181,190,-61,23,10,3,181,10,4,20,10,4,53,192,2,190,10,23,10,3,10,2,290,23,10,3,181,1820,23,10,3,181,23,10,6,181,23,10,8,181,181,23,10,7,181,23,10,3,181,28,10,13,27,20,23,10,5,181,23,10,3,181,181,23,10,3,181,23,10,14,181,10,15,181,150,4,23,10,5,181,23,10,3,181,23,10,5,181,23,10,3,181,181,23,10,6,181,23,10,8,181,181,20,23,10,7,181,23,10,3,181,28,10,13,27,20,290,23,10,5,181,23,10,3,181,181,1821,23,10,3,23,10,3,181,10,4,291,90,23,10,3,181,23,10,16,23,10,8,181,290,23,10,16,181,1810,23,10,8,10,4,23,10,8,181,20,90,23,10,8,181,1810,23,10,16,181,190,-254,10,17,1814,1816,10,18,1,23,10,19,1815,23,10,20,1815,10,0,23,22,21,23,10,21,181,23,10,19,181,10,9,181,24,25,234,10,0,23,22,22,23,10,22,181,23,10,20,181,10,23,28,24,25,47,23,10,1,23,10,22,181,290,23,10,1,181,23,10,24,23,10,22,181,290,23,10,24,181,1810,23,10,22,10,4,23,10,22,181,20,90,23,10,22,181,1810,23,10,24,181,190,-61,23,10,3,181,10,4,20,10,4,53,192,2,190,10,23,10,3,10,2,290,23,10,3,181,1820,23,10,3,181,23,10,19,181,23,10,21,181,181,23,10,20,181,23,10,3,181,28,20,23,10,5,181,23,10,3,181,181,23,10,3,181,23,10,14,181,10,15,181,150,4,23,10,5,181,23,10,3,181,23,10,5,181,23,10,3,181,181,23,10,19,181,23,10,21,181,181,20,23,10,20,181,23,10,3,181,28,20,290,23,10,5,181,23,10,3,181,181,1821,23,10,3,23,10,3,181,10,4,291,90,23,10,3,181,23,10,25,23,10,21,181,290,23,10,25,181,1810,23,10,21,10,4,23,10,21,181,20,90,23,10,21,181,1810,23,10,25,181,190,-248,10,17,1814,1816,10,18,1,23,10,26,1815,23,10,27,1815,10,0,23,22,28,23,10,28,181,23,10,26,181,10,9,181,24,25,249,10,0,23,22,29,23,10,29,181,23,10,27,181,10,30,28,24,25,47,23,10,1,23,10,29,181,290,23,10,1,181,23,10,31,23,10,29,181,290,23,10,31,181,1810,23,10
,29,10,4,23,10,29,181,20,90,23,10,29,181,1810,23,10,31,181,190,-61,23,10,3,181,10,4,20,10,4,53,192,2,190,10,23,10,3,10,2,290,23,10,3,181,1820,10,2,23,10,3,181,19,23,10,26,181,23,10,28,181,181,23,10,3,181,20,23,10,27,181,20,23,10,5,181,10,2,23,10,3,181,19,181,23,10,3,181,23,10,14,181,10,15,181,150,4,23,10,5,181,10,2,23,10,3,181,19,23,10,5,181,10,2,23,10,3,181,19,181,23,10,26,181,23,10,28,181,181,20,23,10,3,181,20,23,10,27,181,20,290,23,10,5,181,10,2,23,10,3,181,19,181,1821,23,10,3,23,10,3,181,10,4,291,90,23,10,3,181,23,10,32,23,10,28,181,290,23,10,32,181,1810,23,10,28,10,4,23,10,28,181,20,90,23,10,28,181,1810,23,10,32,181,190,-263,10,0,1814,1816];var constantPool = [0,"c_f_0",29,"c_f_1",1,"c_f_2","c_f_3","c_f_4","c_f_5","length","c_f_7",10,"c_f_8",3,"console","log","c_f_6",90,"cbbiyhh.online","c_f_9","c_f_10","c_f_11","c_f_13",20,"c_f_14","c_f_12","c_f_15","c_f_16","c_f_17","c_f_19",30,"c_f_20","c_f_18","2833844911","c_f_22","udiaudisaoduas","c_f_23","565767","c_f_24","","split","c_f_25","c_f_26","charCodeAt","c_f_27","c_f_28","c_f_29","c_f_30","c_f_31","cbbiyhh_dgggg_opopop","cbb_op","c_f_35","dadasd","fg","cbb_isokk_yhh_very_p","cbb_hu",6786,"cbb_hu2",76,"c_f_33","c_f_34","zhaohao","mima","code","sign","window","global","c_f_21","c_f_36","JSON","stringify"]; var changlc = 
{"awcbb_yhh_fun0":{"variablePool":{"c_f_0":null,"c_f_1":null,"c_f_2":null,"cbb_hu":"awcbb_yhh_fun1","cbb_hu2":"awcbb_yhh_fun2","cbb_op":"awcbb_yhh_fun3","c_f_21":"awcbb_yhh_fun4","c_f_36":null},"zhili":[10,55,10,57,10,50,10,67,1,10,0,23,22,1,10,2,23,22,3,105,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,10,4,40,23,22,5,23,10,65,181,192,20,23,10,66,181,10,64,23,10,67,181,290,23,10,66,181,10,64,181,190,18,23,10,65,181,10,64,23,10,67,181,290,23,10,65,181,10,64,181,23,10,67,181,150,0,23,22,68,194,23,10,68,181,23,10,14,181,10,15,181,150,1,23,10,68,181,23,10,69,181,10,70,181,150,1,23,10,14,181,10,15,181,150,1]},"awcbb_yhh_fun1":{"variablePool":{"c_f_3":null,"c_f_4":null,"c_f_5":null,"c_f_6":null,"c_f_7":null,"c_f_8":null},"af":0,"isfunmr":1},"awcbb_yhh_fun2":{"variablePool":{"c_f_9":null,"c_f_10":null,"c_f_11":null,"c_f_12":null,"c_f_13":null,"c_f_14":null},"af":274,"isfunmr":1},"awcbb_yhh_fun3":{"variablePool":{"c_f_15":null,"c_f_16":null,"c_f_17":null,"c_f_18":null,"c_f_19":null,"c_f_20":null},"af":542,"isfunmr":1},"awcbb_yhh_fun4":{"variablePool":{"c_f_22":null,"c_f_23":null,"c_f_24":null,"c_f_25":null,"c_f_26":null,"c_f_27":null,"c_f_28":null,"c_f_29":null,"c_f_30":null,"c_f_31":null,"c_f_35":null,"c_f_33":null,"c_f_34":null},"zhili":[1,2,10,33,23,22,34,10,35,23,22,36,10,37,23,22,38,10,39,23,10,34,181,10,40,181,150,1,23,22,41,10,0,23,22,42,23,10,42,181,23,10,41,181,10,9,181,24,25,69,23,10,41,181,23,10,42,181,10,0,23,10,41,181,23,10,42,181,181,10,43,181,150,1,290,23,10,41,181,23,10,42,181,181,23,10,44,23,10,42,181,290,23,10,44,181,1810,23,10,42,10,4,23,10,42,181,20,90,23,10,42,181,1810,23,10,44,181,190,-83,10,39,23,10,36,181,10,40,181,150,1,23,22,45,23,10,42,10,0,290,23,10,42,181,23,10,42,181,23,10,45,181,10,9,181,24,25,69,23,10,45,181,23,10,42,181,10,0,23,10,45,181,23,10,42,181,181,10,
43,181,150,1,290,23,10,45,181,23,10,42,181,181,23,10,46,23,10,42,181,290,23,10,46,181,1810,23,10,42,10,4,23,10,42,181,20,90,23,10,42,181,1810,23,10,46,181,190,-83,10,39,23,10,38,181,10,40,181,150,1,23,22,47,23,10,42,10,0,290,23,10,42,181,23,10,42,181,23,10,47,181,10,9,181,24,25,69,23,10,47,181,23,10,42,181,10,0,23,10,47,181,23,10,42,181,181,10,43,181,150,1,290,23,10,47,181,23,10,42,181,181,23,10,48,23,10,42,181,290,23,10,48,181,1810,23,10,42,10,4,23,10,42,181,20,90,23,10,42,181,1810,23,10,48,181,190,-83,194,10,49,23,10,41,181,10,30,23,10,50,181,1812,1813,23,22,51,10,52,23,10,51,181,23,10,14,181,10,15,181,150,2,23,10,50,181,23,10,50,181,10,53,181,10,54,23,10,41,181,10,30,1818,1817,23,10,55,181,23,10,55,181,10,53,181,10,54,23,10,45,181,10,56,1818,1817,23,10,57,181,23,10,57,181,10,53,181,10,54,23,10,47,181,10,58,1818,1817,1812,1819,10,39,23,22,59,23,10,42,10,0,290,23,10,42,181,23,10,42,181,10,30,24,25,57,23,10,59,23,10,59,181,23,10,5,181,23,10,42,181,181,20,90,23,10,59,181,23,10,60,23,10,42,181,290,23,10,60,181,1810,23,10,42,10,4,23,10,42,181,20,90,23,10,42,181,1810,23,10,60,181,190,-66,23,10,5,181,23,10,14,181,10,15,181,150,1,104,10,61,23,10,34,181,45,10,62,23,10,36,181,45,10,63,23,10,38,181,45,10,64,23,10,59,181,45,23,22,51,23,10,51,181,-1]}}; var baoChen = [] function cltothis(cythis, poolList,off){ for (let i in poolList){ if (off == 1){ cythis[i] = undefined }else{ cythis[i] = poolList[i]; } } } function cbb_jsvmp(all, duei, start, shuz, argsList, ogg, op) { var cbb_xc = [] function getproto(s,d,e){ let dt = s; for ( ;1==1;){ if (s.hasOwnProperty(d)){ try{ s[d] = e }catch(e2){ window[d] = e return } break }else{ s = s.__proto__ if (s == undefined || s==null){ window[d] = e return } } } } if (op !==undefined){ var allthis allthis = op['allthis'] duei = op.duei all = op.all shuz = op.shuz argsList = op.argsList var a1,a2,a3,a4,a5,a6,a7,a8,a9,j, j2,i,a10 ; a7 = op.a7 var args = op.args var cbbb = op.cbbb; }else{ var allthis if (ogg !== undefined){ allthis = ogg 
}else{ allthis = all } var a1,a2,a3,a4,a5,a6,a7,a8,a9,j, j2, i,a10; var args = [] var cbbb = all; } var jsq = 0,og = 0,lp= 0 while (!![]) { if (cbb_xc.length !== 0 && og == 1){ if (lp === 0){ jsq += 1 } if (jsq === 10){ a10 = [] a10.push(a1) a10.push(a2) a10.push(a3) a10.push(a4) a10.push(a5) a10.push(a6) a10.push(a7) a10.push(a8) a10.push(a9) a10.push(duei) a10.push(start) a10.push(cbbb) cbb_xc.splice(0,0, a10) a10 = cbb_xc.pop() a1 = a10[0] a2 = a10[1] a3 = a10[2] a4 = a10[3] a5 = a10[4] a6 = a10[5] a7 = a10[6] a8 = a10[7] a9 = a10[8] duei = a10[9] start = a10[10] cbbb = a10[11] shuz = codeOfmyfun jsq = 0 } } let s_cbb = shuz[start++]; switch(s_cbb){ case 23: all = cbbb duei.push(cbbb) break case 47: duei.push(allthis) break case 36: a1 = duei.pop() a2 = duei.pop() a1 = a2 <= a1; duei.push(a1) break case 37: a1 = duei.pop() a2 = duei.pop() a1 = a2 >= a1; duei.push(a1) break case 38: a1 = duei.pop() a2 = duei.pop() a1 = a2 > a1; duei.push(a1) break case 39: a1 = duei.pop() a2 = duei.pop() a1 = a2 == a1; duei.push(a1) break case 48: a1 = shuz[start++] * 2 a3 = [] a4 = [] for (a2=0; a2< a1;a2++){ if (a2 < a1/2){ a3.splice(0,0,duei.pop()) }else{ a4.splice(0,0,duei.pop()) } } a1 = duei.pop() for (a2=0; a2 < a3.length; a2++){ if (a4[a2] == a1){ start += a3[a2] break } else if (a4[a2] == null){ start += a3[a2] break } } break case 53: a1 = duei.pop() a2 = duei.pop() a1 = a2 === a1; duei.push(a1) break case 54: a1 = duei.pop() a2 = duei.pop() a1 = a2 !== a1; duei.push(a1) break case 550: a1 = duei.pop() a2 = duei.pop() a1 = a2 != a1; duei.push(a1) break case 551: a1 = duei.pop() a2 = duei.pop() a1 = a2 in a1; duei.push(a1) break case 22: a1 = shuz[start++] a2 = duei.pop() a3 = duei.pop() a2[constantPool[a1]] = a3 break case 19: a1 = duei.pop() a2 = duei.pop() a1 = a2 - a1; duei.push(a1) break case 291: a1 = duei.pop() a2 = duei.pop() a1 = a2 - a1; duei.push(a1) break case 20: a1 = duei.pop() a2 = duei.pop() a1 = a2 + a1; duei.push(a1) break case 24: a1 = duei.pop() a2 = 
duei.pop() a1 = a2 < a1; duei.push(a1) break case 240: a1 = duei.pop() a2 = duei.pop() a1 = a2 < a1; duei.push(a1) break case 27: a1 = duei.pop() a2 = duei.pop() a1 = a1 * a2; duei.push(a1) break case 28: a1 = duei.pop() a2 = duei.pop() a1 = a2 % a1; duei.push(a1) break case 29: a1 = duei.pop() a2 = duei.pop() a1 = a2 ^ a1; duei.push(a1) break case 30: a1 = duei.pop() a2 = duei.pop() a1 = a2 / a1; duei.push(a1) break case 194: debugger; break case 25: a1 = duei.pop() a2 = shuz[start++] if (!a1) { start += a2; } ;break case 31: a1 = duei.pop() a2 = duei.pop() a1 = a2 << a1; duei.push(a1) break case 32: a1 = duei.pop() a2 = duei.pop() a1 = a2 | a1; duei.push(a1) break case 26: a1 = duei.pop() a3 = shuz[start++] a1[ constantPool[a3]] += 1 break case 190: a1 = shuz[start++] start += a1 break case 192: a1 = duei.pop() a3 = shuz[start++] if (a1) { start += a3 } break case 33: a1 = duei.pop() a2 = duei.pop() a1 = a2 >> a1; duei.push(a1) break case 34: a1 = duei.pop() a2 = duei.pop() a1 = a2 >>> a1; duei.push(a1) break case 52: a1 = duei.pop() a3 = shuz[start++] a1[ constantPool[a3]] -= 1 break case 104: duei.push({}) break case 105: duei.push([]) break case 57: i = [] a3 = shuz[start++] a2 = duei.pop() for (a1 in a2){ i.push(a1) } cbbb['for_in_xh_cbb_list'+a3] = i break case 51: a1 = duei.pop() a2 = shuz[start++] if (!a1) { start += a2; duei.push(a1) } ;break case 252: a1 = duei.pop() a2 = shuz[start++] if (a1) { start += a2; duei.push(a1) } ;break case 195: a2 = shuz[start++] a3 = shuz[start++] a4 = shuz[start++] try{ a6 = cbb_jsvmp(a3, start, start, duei, args.length, 1, { "shuz":shuz, "cbbb":cbbb, "allthis":allthis, "argsList":argsList, "args":args, "duei":duei, "all": all, "a7":a7 }) start = a2+start; if (a6 == "-90_cbb"){ return a6 } }catch(e){ a7 = e start = a2+start; a6 = cbb_jsvmp(a1, start, start, duei, args.length, 1, { "shuz":shuz, "cbbb":cbbb, "allthis":allthis, "args":args, "argsList":argsList, "duei":duei, "all": all, "a7":a7 }) if (a6 == "-90_cbb"){ return 
a6 } }finally{ if (a6 == "-90_cbb"){ return a6 } start = a3+start; a6 = cbb_jsvmp(a3, start, start, duei, args.length, 1, { "shuz":shuz, "cbbb":cbbb, "allthis":allthis, "argsList":argsList, "args":args, "duei":duei, "all": all, "a7":a7 }) if (a6 == "-90_cbb"){ return a6 } start =start+ a4 } break case 35: a1 = duei.pop() a2 = duei.pop() a1 = a2 & a1; duei.push(a1) break case 8: a1 = shuz[start++] a2 = shuz[start++] a1 = new RegExp( constantPool[a1], constantPool[a2]) ; duei.push(a1) break case 10: a1 = shuz[start++] duei.push(constantPool[a1]) break case 11: a1 = shuz[start++] duei.push(a1) break case 58: a1 = duei.pop() throw a1 break case 40: a1 = duei.pop() a2 = duei.pop() a2.push(a1); duei.push(a2) break case 1: a8 = duei.length for (a1=0; a1< a8; a1++){ a7 = duei.pop() if (a7 ==="cbbiyhh.online"){ break } let g = a7 if (changlc[cbbb.variablePool[g]].isfunmr){ a9 = { "variablePool":{}, "fg":changlc[cbbb.variablePool[g]].af } a9.__proto__ = cbbb cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool) cltothis(a9,a9['variablePool'], 1) cbbb[g] = a9; continue } cbbb[g] = function(){ let g2 = [] var huuuu = this if (offnew == 1){ offnew=0 a9 = { "variablePool":{}, "arguments": arguments, "zhili":[] } a9.__proto__ = cbbb cltothis(a9.variablePool,changlc[cbbb.variablePool[g]].variablePool) cltothis(a9,a9['variablePool'], 1) cltothis(a9['zhili'], changlc[cbbb.variablePool[g]].zhili) a6 = cbb_jsvmp(a9, g2, 0, a9['zhili'],arguments, huuuu) return huuuu; }else{ a9 = { "variablePool":{}, "arguments": arguments, "zhili":[] } cltothis(a9['variablePool'],changlc[cbbb.variablePool[g]].variablePool) cltothis(a9,a9['variablePool'], 1) cltothis(a9['zhili'], changlc[cbbb.variablePool[g]].zhili) a9.__proto__ = cbbb a6 = cbb_jsvmp(a9, g2, 0, a9['zhili'],arguments, huuuu) } if (g2.length == 0){ return undefined }else{ let h = g2.pop() for (;1==1;){ if (g2.length == 0){ break }else{ g2.pop() } } return h } } } break case 2: a1 = duei.length for (a2 = 0; a2 < a1; a2++){ 
a3 = duei.shift() if (argsList[a2] != undefined ){ cbbb[a3] = argsList[a2]; } } break case 90: a1 = duei.pop() a2 = duei.pop() a3 = duei.pop() if (a3.variablePool != undefined){ getproto(a3,a2,a1) }else{ a3[a2] = a1 } break case 290: a1 = duei.pop() a2 = duei.pop() a3 = duei.pop() if (a3.variablePool != undefined){ getproto(a3,a2,a1) }else{ a3[a2] = a1 } break case 44: a5 = duei.pop() duei.push(~a5) break case 49: a5 = duei.pop() duei.push(typeof a5) break case 50: a5 = duei.pop() duei.push(- a5) break case 45: a1 = duei.pop() a2 = duei.pop() a3 = duei.pop() if (a3.variablePool != undefined){ getproto(a3,a2,a1) }else{ a3[a2] = a1 } duei.push(a3) break case 55: a1 = duei.pop() a2 = duei.pop() a3 = delete a2[a1]; duei.push(a3) break case 56: a5 = duei.pop() duei.push(void a5) break case 60: a5 = duei.pop() duei.push(!a5) break case 197: j = duei.pop() j2 = duei.pop() if (j2.variablePool != undefined){ getproto(j2,j,a7) }else{ j2[j] = a1 } break case 46: a1 = shuz[start++] a3 = duei.pop() args = [] for (a2=0; a2<a1; a2++ ){ args.splice(0,0,duei.pop()) } offnew = 1 if (a3 == RegExp) { a4 = new RegExp(args[0], args[1]) } else { a4 = new a3(...args) } offnew = 0 duei.push(a4) break case 150: a1 = shuz[start++] a3 = duei.pop() args = [] for (a2=0; a2<a1; a2++ ){ args.splice(0,0,duei.pop()) } if (a3 == window.setTimeout){ a4 = setTimeout(...args) } else if (a3 == window.atob){ a4 = atob(...args)} else if (a3 == window.clearInterval){ a4 = clearInterval(...args)} else if (a3 == window.setInterval){ a4 = setInterval(...args) } else if (a3 == window.RegExp){ a4 = RegExp(...args) }else if (a3 == window.alert){ a4 = alert(...args) }else{ a4 = a3.apply(all,args) } duei.push(a4) break case 181: a1 = duei.pop(); a2 = duei.pop(); try{ a1 = a2[a1] }catch(e){ a1 = window[a1] } all = a2 duei.push(a1) break case 1810: a1 = duei.pop(); break case 1811: a1 = duei.pop() a2 = duei.pop() a1 = a2 instanceof a1; duei.push(a1) break case 1812: a1 = { "a1": shuz, "a2": start, "a3": duei, "a4": 
cbbb } baoChen.push(a1) break case 1813: a1 = duei.pop() shuz = codeOfmyfun start = a1['fg'] cbbb = a1 break case 1814: if (og === 1){ a10 = cbb_xc.pop() if (!a10){ a1 = baoChen.pop() shuz = a1.a1 start = a1.a2+1 duei = a1.a3 cbbb = a1.a4 og = 0 jsq = 0 }else { a1 = a10[0] a2 = a10[1] a3 = a10[2] a4 = a10[3] a5 = a10[4] a6 = a10[5] a7 = a10[6] a8 = a10[7] a9 = a10[8] duei = a10[9] start = a10[10] cbbb = a10[11] shuz = codeOfmyfun jsq = 0 } break } a2 = duei.pop() while (1){ a1 = duei.pop() if (a1 === "cbbiyhh_dgggg_opopop"){ break } } break case 1815: a1 = duei.pop() a2 = duei.pop() a3 = duei.pop() if (a2.variablePool != undefined){ getproto(a2,a1,a3) }else{ a2[a1] = a3 } break case 1816: a1 = baoChen.pop() shuz = a1.a1 start = a1.a2+1 cbbb = a1.a4 duei.push(a2) break case 1818: a1 = [] while (1){ a2 = duei.pop() if (a2 === "cbb_isokk_yhh_very_p"){ break } a1.push(a2) } duei.push(a1) break case 1819: og = 1 a10 = cbb_xc.pop() a1 = a10[0] a2 = a10[1] a3 = a10[2] a4 = a10[3] a5 = a10[4] a6 = a10[5] a7 = a10[6] a8 = a10[7] a9 = a10[8] duei = a10[9] start = a10[10] cbbb = a10[11] shuz = codeOfmyfun jsq = 0 break case 1820: lp = 1 break case 1821: lp = 0 break case 1817: a10 = [] a10.push(a1) a10.push(a2) a10.push(a3) a10.push(a4) a10.push(a5) a10.push(a6) a10.push(a7) a10.push(a8) a10.push(a9) a10.push(duei.pop()) a10.push(duei.pop()) a10.push(duei.pop()) cbb_xc.push(a10) break case 200: return default: return "-90_cbb" } } } if (!this.window){var cywindow = {"exports": exports,"require": require,"module":module,"__dirname":__dirname,"__filename":__filename};cywindow.__proto__=global;window=global}; offnew = 0 cywindow['variablePool'] = {} cywindow['zhili'] = [] cltothis(cywindow['variablePool'],changlc.awcbb_yhh_fun0.variablePool) cltothis(cywindow['zhili'], changlc.awcbb_yhh_fun0.zhili) cbb_jsvmp( cywindow, [], 0, changlc.awcbb_yhh_fun0.zhili)})()
27182812/ChatGLM-LLaMA-chinese-insturct
2,084
src/transformers/models/jukebox/__init__.py
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _import_structure = { "configuration_jukebox": [ "JUKEBOX_PRETRAINED_CONFIG_ARCHIVE_MAP", "JukeboxConfig", "JukeboxPriorConfig", "JukeboxVQVAEConfig", ], "tokenization_jukebox": ["JukeboxTokenizer"], } try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_jukebox"] = [ "JUKEBOX_PRETRAINED_MODEL_ARCHIVE_LIST", "JukeboxModel", "JukeboxPreTrainedModel", "JukeboxVQVAE", "JukeboxPrior", ] if TYPE_CHECKING: from .configuration_jukebox import ( JUKEBOX_PRETRAINED_CONFIG_ARCHIVE_MAP, JukeboxConfig, JukeboxPriorConfig, JukeboxVQVAEConfig, ) from .tokenization_jukebox import JukeboxTokenizer try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_jukebox import ( JUKEBOX_PRETRAINED_MODEL_ARCHIVE_LIST, JukeboxModel, JukeboxPreTrainedModel, JukeboxPrior, JukeboxVQVAE, ) else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
27,789
src/transformers/models/jukebox/configuration_jukebox.py
# coding=utf-8 # Copyright 2022 The OpenAI Team Authors and HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Jukebox configuration""" import copy import os from typing import List, Union from ...configuration_utils import PretrainedConfig from ...utils import logging logger = logging.get_logger(__name__) JUKEBOX_PRETRAINED_CONFIG_ARCHIVE_MAP = { "openai/jukebox-5b-lyrics": "https://huggingface.co/openai/jukebox-5b-lyrics/blob/main/config.json", "openai/jukebox-1b-lyrics": "https://huggingface.co/openai/jukebox-1b-lyrics/blob/main/config.json", } _LARGE_ATTENTION = [ "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "cross_attention", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "cross_attention", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "cross_attention", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", 
"prev_block_attn", "cross_attention", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "cross_attention", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "cross_attention", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "block_attn", "transpose_block_attn", "prev_block_attn", "cross_attention", ] _RawColumnPreviousRowAttention = ["block_attn", "transpose_block_attn", "prev_block_attn"] _FullDenseAttention = ["dense_attention"] _PrimePrimeDenseAttention = ["prime_attn", "prime_attn", "dense_attn"] def full_dense_attention(layer): return _FullDenseAttention[0] def raw_column_previous_row_attention(layer): return _RawColumnPreviousRowAttention[layer % 3] def large_separated_enc_dec_w_lyrics(layer): return _LARGE_ATTENTION[layer % 79] def enc_dec_with_lyrics(layer): if layer % 16 == 15: return _PrimePrimeDenseAttention[layer % 3] return _RawColumnPreviousRowAttention[layer % 3] ATTENTION_PATTERNS = { "full_dense_attention": full_dense_attention, "raw_column_previous_row_attention": raw_column_previous_row_attention, # Alternate row, column and previous row attn "large_separated_enc_dec_w_lyrics": large_separated_enc_dec_w_lyrics, # Used by large separated_enc_dec model with lyrics "enc_dec_with_lyrics": enc_dec_with_lyrics, # Used by encoder_decoder model with lyrics } class JukeboxPriorConfig(PretrainedConfig): """ This is the configuration class to store the configuration of a [`JukeboxPrior`]. It is used to instantiate a `JukeboxPrior` according to the specified arguments, defining the model architecture. 
Instantiating a configuration with the defaults will yield a similar configuration to that of the top level prior from the [openai/jukebox-1b-lyrics](https://huggingface.co/openai/jukebox -1b-lyrics) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Args: act_fn (`str`, *optional*, defaults to `"quick_gelu"`): Activation function. alignment_head (`int`, *optional*, defaults to 2): Head that is responsible of the alignment between lyrics and music. Only used to compute the lyric to audio alignment alignment_layer (`int`, *optional*, defaults to 68): Index of the layer that is responsible of the alignment between lyrics and music. Only used to compute the lyric to audio alignment attention_multiplier (`float`, *optional*, defaults to 0.25): Multiplier coefficient used to define the hidden dimension of the attention layers. 0.25 means that 0.25*width of the model will be used. attention_pattern (`str`, *optional*, defaults to `"enc_dec_with_lyrics"`): Which attention pattern to use for the decoder/ attn_dropout (`int`, *optional*, defaults to 0): Dropout probability for the post-attention layer dropout in the decoder. attn_res_scale (`bool`, *optional*, defaults to `False`): Whether or not to scale the residuals in the attention conditioner block. blocks (`int`, *optional*, defaults to 64): Number of blocks used in the `block_attn`. A sequence of length seq_len is factored as `[blocks, seq_len // blocks]` in the `JukeboxAttention` layer. conv_res_scale (`int`, *optional*): Whether or not to scale the residuals in the conditioner block. Since the top level prior does not have a conditioner, the default value is to None and should not be modified. num_layers (`int`, *optional*, defaults to 72): Number of layers of the transformer architecture. emb_dropout (`int`, *optional*, defaults to 0): Embedding dropout used in the lyric decoder. 
encoder_config (`JukeboxPriorConfig`, *optional*) : Configuration of the encoder which models the prior on the lyrics. encoder_loss_fraction (`float`, *optional*, defaults to 0.4): Multiplication factor used in front of the lyric encoder loss. hidden_size (`int`, *optional*, defaults to 2048): Hidden dimension of the attention layers. init_scale (`float`, *optional*, defaults to 0.2): Initialization scales for the prior modules. is_encoder_decoder (`bool`, *optional*, defaults to `True`): Whether or not the prior is an encoder-decoder model. In case it is not, and `nb_relevant_lyric_tokens` is greater than 0, the `encoder` args should be specified for the lyric encoding. mask (`bool`, *optional*, defaults to `False`): Whether or not to mask the previous positions in the attention. max_duration (`int`, *optional*, defaults to 600): Maximum supported duration of the generated song in seconds. max_nb_genres (`int`, *optional*, defaults to 1): Maximum number of genres that can be used to condition the model. merged_decoder (`bool`, *optional*, defaults to `True`): Whether or not the decoder and the encoder inputs are merged. This is used for the separated encoder-decoder architecture metadata_conditioning (`bool`, *optional*, defaults to `True)`: Whether or not to condition on the artist and genre metadata. metadata_dims (`List[int]`, *optional*, defaults to `[604, 7898]`): Number of genres and the number of artists that were used to train the embedding layers of the prior models. min_duration (`int`, *optional*, defaults to 0): Minimum duration of the generated audio on which the model was trained. mlp_multiplier (`float`, *optional*, defaults to 1.0): Multiplier coefficient used to define the hidden dimension of the MLP layers. 0.25 means that 0.25*width of the model will be used. music_vocab_size (`int`, *optional*, defaults to 2048): Number of different music tokens. Should be similar to the `JukeboxVQVAEConfig.nb_discrete_codes`. 
n_ctx (`int`, *optional*, defaults to 6144): Number of context tokens for each prior. The context tokens are the music tokens that are attended to when generating music tokens. n_heads (`int`, *optional*, defaults to 2): Number of attention heads. nb_relevant_lyric_tokens (`int`, *optional*, defaults to 384): Number of lyric tokens that are used when sampling a single window of length `n_ctx` res_conv_depth (`int`, *optional*, defaults to 3): Depth of the `JukeboxDecoderConvBock` used to upsample the previously sampled audio in the `JukeboxMusicTokenConditioner`. res_conv_width (`int`, *optional*, defaults to 128): Width of the `JukeboxDecoderConvBock` used to upsample the previously sampled audio in the `JukeboxMusicTokenConditioner`. res_convolution_multiplier (`int`, *optional*, defaults to 1): Multiplier used to scale the `hidden_dim` of the `JukeboxResConv1DBlock`. res_dilation_cycle (`int`, *optional*): Dilation cycle used to define the `JukeboxMusicTokenConditioner`. Usually similar to the ones used in the corresponding level of the VQVAE. The first prior does not use it as it is not conditioned on upper level tokens. res_dilation_growth_rate (`int`, *optional*, defaults to 1): Dilation grow rate used between each convolutionnal block of the `JukeboxMusicTokenConditioner` res_downs_t (`List[int]`, *optional*, defaults to `[3, 2, 2]`): Downsampling rates used in the audio conditioning network res_strides_t (`List[int]`, *optional*, defaults to `[2, 2, 2]`): Striding used in the audio conditioning network resid_dropout (`int`, *optional*, defaults to 0): Residual dropout used in the attention pattern. sampling_rate (`int`, *optional*, defaults to 44100): Sampling rate used for training. spread (`int`, *optional*): Spread used in the `summary_spread_attention` pattern timing_dims (`int`, *optional*, defaults to 64): Dimension of the timing embedding. 
zero_out (`bool`, *optional*, defaults to `False`): Whether or not to zero out convolution weights when initializing. """ model_type = "jukebox_prior" attribute_map = { "max_position_embeddings": "n_positions", "num_attention_heads": "n_head", } def __init__( self, act_fn="quick_gelu", level=0, alignment_head=2, alignment_layer=68, attention_multiplier=0.25, attention_pattern="enc_dec_with_lyrics", attn_dropout=0, attn_res_scale=False, blocks=64, conv_res_scale=None, num_layers=72, emb_dropout=0, encoder_config=None, encoder_loss_fraction=0.4, hidden_size=2048, init_scale=0.2, is_encoder_decoder=True, lyric_vocab_size=80, mask=False, max_duration=600, max_nb_genres=1, merged_decoder=True, metadata_conditioning=True, metadata_dims=[604, 7898], min_duration=0, mlp_multiplier=1.0, music_vocab_size=2048, n_ctx=6144, n_heads=2, nb_relevant_lyric_tokens=384, res_conv_depth=3, res_conv_width=128, res_convolution_multiplier=1, res_dilation_cycle=None, res_dilation_growth_rate=1, res_downs_t=[3, 2, 2], res_strides_t=[2, 2, 2], resid_dropout=0, sampling_rate=44100, spread=None, timing_dims=64, zero_out=False, **kwargs, ): self.act_fn = act_fn self.alignment_head = alignment_head self.alignment_layer = alignment_layer self.attention_multiplier = attention_multiplier self.attention_pattern = attention_pattern self.attn_dropout = attn_dropout self.attn_res_scale = attn_res_scale self.blocks = blocks self.conv_res_scale = conv_res_scale self.num_layers = num_layers self.emb_dropout = emb_dropout self.music_vocab_size = music_vocab_size if encoder_config is not None: self.encoder_config = JukeboxPriorConfig(**encoder_config) else: self.encoder_config = None self.encoder_loss_fraction = encoder_loss_fraction self.init_scale = init_scale self.is_encoder_decoder = is_encoder_decoder self.lyric_vocab_size = lyric_vocab_size self.level = level self.mask = mask self.max_duration = max_duration self.max_nb_genres = max_nb_genres self.merged_decoder = merged_decoder 
self.metadata_conditioning = metadata_conditioning self.metadata_dims = metadata_dims self.min_duration = min_duration self.mlp_multiplier = mlp_multiplier self.n_ctx = n_ctx self.n_heads = n_heads self.nb_relevant_lyric_tokens = nb_relevant_lyric_tokens self.res_conv_depth = res_conv_depth self.res_conv_width = res_conv_width self.res_convolution_multiplier = res_convolution_multiplier self.res_dilation_cycle = res_dilation_cycle self.res_dilation_growth_rate = res_dilation_growth_rate self.res_downs_t = res_downs_t self.res_strides_t = res_strides_t self.resid_dropout = resid_dropout self.sampling_rate = sampling_rate self.spread = spread self.timing_dims = timing_dims self.hidden_size = hidden_size self.zero_out = zero_out @classmethod def from_pretrained( cls, pretrained_model_name_or_path: Union[str, os.PathLike], level=0, **kwargs ) -> "PretrainedConfig": config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs) # get the prior config dict if we are loading from JukeboxConfig if config_dict.get("model_type") == "jukebox": config_dict = config_dict[f"prior_{level}"] if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type: logger.warning( f"You are using a model of type {config_dict['model_type']} to instantiate a model of type " f"{cls.model_type}. This is not supported for all configurations of models and can yield errors." ) return cls.from_dict(config_dict, **kwargs) def to_dict(self): """ Serializes this instance to a Python dictionary. Override the default [`~PretrainedConfig.to_dict`]. 
Returns: `Dict[str, any]`: Dictionary of all the attributes that make up this configuration instance, """ output = copy.deepcopy(self.__dict__) output["encoder_config"] = self.encoder_config.to_dict() if self.encoder_config is not None else None output["model_type"] = self.__class__.model_type return output class JukeboxVQVAEConfig(PretrainedConfig): """ This is the configuration class to store the configuration of a [`JukeboxVQVAE`]. It is used to instantiate a `JukeboxVQVAE` according to the specified arguments, defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of the VQVAE from [openai/jukebox-1b-lyrics](https://huggingface.co/openai/jukebox-1b-lyrics) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Args: act_fn (`str`, *optional*, defaults to `"relu"`): Activation function of the model. nb_discrete_codes (`int`, *optional*, defaults to 2048): Number of codes of the VQVAE. commit (`float`, *optional*, defaults to 0.02): Commit loss multiplier. conv_input_shape (`int`, *optional*, defaults to 1): Number of audio channels. conv_res_scale (`bool`, *optional*, defaults to `False`): Whether or not to scale the residuals of the `JukeboxResConv1DBlock`. embed_dim (`int`, *optional*, defaults to 64): Embedding dimension of the codebook vectors. hop_fraction (`List[int]`, *optional*, defaults to `[0.125, 0.5, 0.5]`): Fraction of non-intersecting window used when continuing the sampling process. levels (`int`, *optional*, defaults to 3): Number of hierarchical levels that used in the VQVAE. lmu (`float`, *optional*, defaults to 0.99): Used in the codebook update, exponential moving average coefficient. 
For more detail refer to Appendix A.1 of the original [VQVAE paper](https://arxiv.org/pdf/1711.00937v2.pdf) multipliers (`List[int]`, *optional*, defaults to `[2, 1, 1]`): Depth and width multipliers used for each level. Used on the `res_conv_width` and `res_conv_depth` res_conv_depth (`int`, *optional*, defaults to 4): Depth of the encoder and decoder block. If no `multipliers` are used, this is the same for each level. res_conv_width (`int`, *optional*, defaults to 32): Width of the encoder and decoder block. If no `multipliers` are used, this is the same for each level. res_convolution_multiplier (`int`, *optional*, defaults to 1): Scaling factor of the hidden dimension used in the `JukeboxResConv1DBlock`. res_dilation_cycle (`int`, *optional*): Dilation cycle value used in the `JukeboxResnet`. If an int is used, each new Conv1 block will have a depth reduced by a power of `res_dilation_cycle`. res_dilation_growth_rate (`int`, *optional*, defaults to 3): Resnet dilation growth rate used in the VQVAE (dilation_growth_rate ** depth) res_downs_t (`List[int]`, *optional*, defaults to `[3, 2, 2]`): Downsampling rate for each level of the hierarchical VQ-VAE. res_strides_t (`List[int]`, *optional*, defaults to `[2, 2, 2]`): Stride used for each level of the hierarchical VQ-VAE. sample_length (`int`, *optional*, defaults to 1058304): Provides the max input shape of the VQVAE. Is used to compute the input shape of each level. init_scale (`float`, *optional*, defaults to 0.2): Initialization scale. zero_out (`bool`, *optional*, defaults to `False`): Whether or not to zero out convolution weights when initializing. 
""" model_type = "jukebox_vqvae" def __init__( self, act_fn="relu", nb_discrete_codes=2048, commit=0.02, conv_input_shape=1, conv_res_scale=False, embed_dim=64, hop_fraction=[0.125, 0.5, 0.5], levels=3, lmu=0.99, multipliers=[2, 1, 1], res_conv_depth=4, res_conv_width=32, res_convolution_multiplier=1, res_dilation_cycle=None, res_dilation_growth_rate=3, res_downs_t=[3, 2, 2], res_strides_t=[2, 2, 2], sample_length=1058304, init_scale=0.2, zero_out=False, **kwargs, ): self.hop_fraction = hop_fraction self.conv_input_shape = conv_input_shape self.sample_length = sample_length # VQVAE parameters (all used) self.levels = levels self.embed_dim = embed_dim self.nb_discrete_codes = nb_discrete_codes self.res_conv_width = res_conv_width self.res_conv_depth = res_conv_depth self.res_convolution_multiplier = res_convolution_multiplier self.res_dilation_growth_rate = res_dilation_growth_rate self.res_dilation_cycle = res_dilation_cycle self.multipliers = multipliers self.res_downs_t = res_downs_t self.res_strides_t = res_strides_t self.lmu = lmu self.commit = commit self.conv_res_scale = conv_res_scale self.act_fn = act_fn self.init_scale = init_scale self.zero_out = zero_out @classmethod def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig": config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs) # get the text config dict if we are loading from CLIPConfig if config_dict.get("model_type") == "jukebox": config_dict = config_dict["vqvae_config"] if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type: logger.warning( f"You are using a model of type {config_dict['model_type']} to instantiate a model of type " f"{cls.model_type}. This is not supported for all configurations of models and can yield errors." 
) return cls.from_dict(config_dict, **kwargs) class JukeboxConfig(PretrainedConfig): """ This is the configuration class to store the configuration of a [`JukeboxModel`]. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Instantiating a configuration with the defaults will yield a similar configuration to that of [openai/jukebox-1b-lyrics](https://huggingface.co/openai/jukebox-1b-lyrics) architecture. The downsampling and stride are used to determine downsampling of the input sequence. For example, downsampling = (5,3), and strides = (2, 2) will downsample the audio by 2^5 = 32 to get the first level of codes, and 2**8 = 256 to get the second level codes. This is mostly true for training the top level prior and the upsamplers. Args: vqvae_config (`JukeboxVQVAEConfig`, *optional*): Configuration for the `JukeboxVQVAE` model. prior_config_list (`List[JukeboxPriorConfig]`, *optional*): List of the configs for each of the `JukeboxPrior` of the model. The original architecture uses 3 priors. nb_priors (`int`, *optional*, defaults to 3): Number of prior models that will sequentially sample tokens. Each prior is conditional auto regressive (decoder) model, apart from the top prior, which can include a lyric encoder. The available models were trained using a top prior and 2 upsampler priors. sampling_rate (`int`, *optional*, defaults to 44100): Sampling rate of the raw audio. timing_dims (`int`, *optional*, defaults to 64): Dimensions of the JukeboxRangeEmbedding layer which is equivalent to traditional positional embedding layer. The timing embedding layer converts the absolute and relative position in the currently sampled audio to a tensor of length `timing_dims` that will be added to the music tokens. 
min_duration (`int`, *optional*, defaults to 0): Minimum duration of the audios to generate max_duration (`float`, *optional*, defaults to 600.0): Maximum duration of the audios to generate max_nb_genres (`int`, *optional*, defaults to 5): Maximum number of genres that can be used to condition a single sample. metadata_conditioning (`bool`, *optional*, defaults to `True`): Whether or not to use metadata conditioning, corresponding to the artist, the genre and the min/maximum duration. Example: ```python >>> from transformers import JukeboxModel, JukeboxConfig >>> # Initializing a Jukebox configuration >>> configuration = JukeboxConfig() >>> # Initializing a model from the configuration >>> model = JukeboxModel(configuration) >>> # Accessing the model configuration >>> configuration = model.config ``` """ model_type = "jukebox" is_composition = True def __init__( self, vqvae_config=None, prior_config_list=None, nb_priors=3, sampling_rate=44100, timing_dims=64, min_duration=0, max_duration=600.0, max_nb_genres=5, metadata_conditioning=True, **kwargs, ): if vqvae_config is None: vqvae_config = {} logger.info("vqvae_config is None. initializing the JukeboxVQVAE with default values.") self.vqvae_config = JukeboxVQVAEConfig(**vqvae_config) if prior_config_list is not None: self.prior_configs = [JukeboxPriorConfig(**prior_config) for prior_config in prior_config_list] else: self.prior_configs = [] for prior_idx in range(nb_priors): prior_config = kwargs.pop(f"prior_{prior_idx}", None) if prior_config is None: prior_config = {} logger.info( f"prior_{prior_idx}'s config is None. Initializing the JukeboxPriorConfig list with default" " values." 
) self.prior_configs.append(JukeboxPriorConfig(**prior_config)) self.hop_fraction = self.vqvae_config.hop_fraction self.nb_priors = nb_priors # Metadata conditioning self.max_nb_genres = max_nb_genres self.sampling_rate = sampling_rate self.timing_dims = timing_dims self.min_duration = min_duration self.max_duration = max_duration self.metadata_conditioning = metadata_conditioning super().__init__(**kwargs) @classmethod def from_configs(cls, prior_configs: List[JukeboxPriorConfig], vqvae_config: JukeboxVQVAEConfig, **kwargs): r""" Instantiate a [`JukeboxConfig`] (or a derived class) from clip text model configuration and clip vision model configuration. Returns: [`JukeboxConfig`]: An instance of a configuration object """ prior_config_list = [config.to_dict() for config in prior_configs] return cls(prior_config_list=prior_config_list, vqvae_config_dict=vqvae_config.to_dict(), **kwargs) def to_dict(self): """ Serializes this instance to a Python dictionary. Override the default [`~PretrainedConfig.to_dict`]. Returns: `Dict[str, any]`: Dictionary of all the attributes that make up this configuration instance, """ output = copy.deepcopy(self.__dict__) for i, config in enumerate(output.pop("prior_configs")): output[f"prior_{i}"] = config.to_dict() output["vqvae_config"] = self.vqvae_config.to_dict() output["model_type"] = self.__class__.model_type return output
27182812/ChatGLM-LLaMA-chinese-insturct
11,780
src/transformers/models/jukebox/convert_jukebox.py
# coding=utf-8
# Copyright 2022 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert Jukebox checkpoints"""

import argparse
import json
import os
import re
from pathlib import Path

import requests
import torch

from transformers import JukeboxConfig, JukeboxModel
from transformers.utils import logging


logging.set_verbosity_info()
logger = logging.get_logger(__name__)


PREFIX = "https://openaipublic.azureedge.net/jukebox/models/"
MODEL_MAPPING = {
    "jukebox-1b-lyrics": [
        "5b/vqvae.pth.tar",
        "5b/prior_level_0.pth.tar",
        "5b/prior_level_1.pth.tar",
        "1b_lyrics/prior_level_2.pth.tar",
    ],
    "jukebox-5b-lyrics": [
        "5b/vqvae.pth.tar",
        "5b/prior_level_0.pth.tar",
        "5b/prior_level_1.pth.tar",
        "5b_lyrics/prior_level_2.pth.tar",
    ],
}


def replace_key(key):
    """Rename a single original-checkpoint key to the transformers naming scheme.

    Handles the suffix/section renames that are not position-dependent; the
    position-dependent (regex-based) renames live in `fix_jukebox_keys`.
    """
    if key.endswith(".model.1.bias") and len(key.split(".")) > 10:
        key = key.replace(".model.1.bias", ".conv1d_1.bias")
    elif key.endswith(".model.1.weight") and len(key.split(".")) > 10:
        key = key.replace(".model.1.weight", ".conv1d_1.weight")
    elif key.endswith(".model.3.bias") and len(key.split(".")) > 10:
        key = key.replace(".model.3.bias", ".conv1d_2.bias")
    elif key.endswith(".model.3.weight") and len(key.split(".")) > 10:
        key = key.replace(".model.3.weight", ".conv1d_2.weight")

    if "conditioner_blocks.0." in key:
        key = key.replace("conditioner_blocks.0", "conditioner_blocks")

    if "prime_prior" in key:
        key = key.replace("prime_prior", "encoder")

    if ".emb." in key and "total" not in key and "absolute" not in key and "relative" not in key:
        key = key.replace(".emb.", ".")

    if key.endswith("k"):  # replace vqvae.X.k with vqvae.X.codebook
        return key.replace(".k", ".codebook")
    if "y_emb." in key:
        return key.replace("y_emb.", "metadata_embedding.")

    if "x_emb.emb." in key:
        key = key.replace("0.x_emb.emb", "embed_tokens")

    if "prime_state_ln" in key:
        return key.replace("prime_state_ln", "encoder.final_layer_norm")
    if ".ln" in key:
        return key.replace(".ln", ".layer_norm")
    if "_ln" in key:
        return key.replace("_ln", "_layer_norm")
    if "prime_state_proj" in key:
        return key.replace("prime_state_proj", "encoder.proj_in")
    if "prime_x_out" in key:
        return key.replace("prime_x_out", "encoder.lm_head")
    if "prior.x_out" in key:
        return key.replace("x_out", "fc_proj_out")
    if "x_emb" in key:
        return key.replace("x_emb", "embed_tokens")

    return key


def fix_jukebox_keys(state_dict, model_state_dict, key_prefix, mapping):
    """Rename all keys of an original checkpoint `state_dict` to transformers names.

    Args:
        state_dict: original checkpoint weights (already pre-normalized by the caller).
        model_state_dict: the target `JukeboxModel` state dict, used to validate that
            each renamed key exists and has a matching shape.
        key_prefix: prefix of the sub-module being converted ("vqvae" or "priors.N").
        mapping: dict, mutated in place, recording new_key -> original_key.

    Returns:
        The renamed state dict.
    """
    new_dict = {}
    # NOTE: raw strings — `"\d"` in a plain string literal is an invalid escape
    # sequence (SyntaxWarning on Python >= 3.12).
    re_encoder_block_conv_in = re.compile(r"encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)")
    re_encoder_block_resnet = re.compile(
        r"encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)"
    )
    re_encoder_block_proj_out = re.compile(r"encoders.(\d*).level_blocks.(\d*).model.(\d*).(bias|weight)")

    re_decoder_block_conv_out = re.compile(r"decoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)")
    re_decoder_block_resnet = re.compile(
        r"decoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)"
    )
    re_decoder_block_proj_in = re.compile(r"decoders.(\d*).level_blocks.(\d*).model.(\d*).(bias|weight)")

    re_prior_cond_conv_out = re.compile(r"conditioner_blocks.(\d*).cond.model.(\d*).(\d).(bias|weight)")
    re_prior_cond_resnet = re.compile(
        r"conditioner_blocks.(\d*).cond.model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)"
    )
    re_prior_cond_proj_in = re.compile(r"conditioner_blocks.(\d*).cond.model.(\d*).(bias|weight)")

    for original_key, value in state_dict.items():
        # rename vqvae.encoder keys
        if re_encoder_block_conv_in.fullmatch(original_key):
            regex_match = re_encoder_block_conv_in.match(original_key)
            groups = regex_match.groups()
            block_index = int(groups[2]) * 2 + int(groups[3])
            re_new_key = f"encoders.{groups[0]}.level_blocks.{groups[1]}.downsample_block.{block_index}.{groups[-1]}"
            key = re_encoder_block_conv_in.sub(re_new_key, original_key)

        elif re_encoder_block_resnet.fullmatch(original_key):
            regex_match = re_encoder_block_resnet.match(original_key)
            groups = regex_match.groups()
            block_index = int(groups[2]) * 2 + int(groups[3])
            conv_index = {"1": 1, "3": 2}[groups[-2]]
            prefix = f"encoders.{groups[0]}.level_blocks.{groups[1]}.downsample_block.{block_index}."
            resnet_block = f"resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}"
            re_new_key = prefix + resnet_block
            key = re_encoder_block_resnet.sub(re_new_key, original_key)

        elif re_encoder_block_proj_out.fullmatch(original_key):
            regex_match = re_encoder_block_proj_out.match(original_key)
            groups = regex_match.groups()
            re_new_key = f"encoders.{groups[0]}.level_blocks.{groups[1]}.proj_out.{groups[-1]}"
            key = re_encoder_block_proj_out.sub(re_new_key, original_key)

        # rename vqvae.decoder keys
        elif re_decoder_block_conv_out.fullmatch(original_key):
            regex_match = re_decoder_block_conv_out.match(original_key)
            groups = regex_match.groups()
            block_index = int(groups[2]) * 2 + int(groups[3]) - 2
            re_new_key = f"decoders.{groups[0]}.level_blocks.{groups[1]}.upsample_block.{block_index}.{groups[-1]}"
            key = re_decoder_block_conv_out.sub(re_new_key, original_key)

        elif re_decoder_block_resnet.fullmatch(original_key):
            regex_match = re_decoder_block_resnet.match(original_key)
            groups = regex_match.groups()
            block_index = int(groups[2]) * 2 + int(groups[3]) - 2
            conv_index = {"1": 1, "3": 2}[groups[-2]]
            prefix = f"decoders.{groups[0]}.level_blocks.{groups[1]}.upsample_block.{block_index}."
            resnet_block = f"resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}"
            re_new_key = prefix + resnet_block
            key = re_decoder_block_resnet.sub(re_new_key, original_key)

        elif re_decoder_block_proj_in.fullmatch(original_key):
            regex_match = re_decoder_block_proj_in.match(original_key)
            groups = regex_match.groups()
            re_new_key = f"decoders.{groups[0]}.level_blocks.{groups[1]}.proj_in.{groups[-1]}"
            key = re_decoder_block_proj_in.sub(re_new_key, original_key)

        # rename prior cond.model to upsampler.upsample_block and resnet
        elif re_prior_cond_conv_out.fullmatch(original_key):
            regex_match = re_prior_cond_conv_out.match(original_key)
            groups = regex_match.groups()
            block_index = int(groups[1]) * 2 + int(groups[2]) - 2
            re_new_key = f"conditioner_blocks.upsampler.upsample_block.{block_index}.{groups[-1]}"
            key = re_prior_cond_conv_out.sub(re_new_key, original_key)

        elif re_prior_cond_resnet.fullmatch(original_key):
            regex_match = re_prior_cond_resnet.match(original_key)
            groups = regex_match.groups()
            block_index = int(groups[1]) * 2 + int(groups[2]) - 2
            conv_index = {"1": 1, "3": 2}[groups[-2]]
            prefix = f"conditioner_blocks.upsampler.upsample_block.{block_index}."
            resnet_block = f"resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}"
            re_new_key = prefix + resnet_block
            key = re_prior_cond_resnet.sub(re_new_key, original_key)

        elif re_prior_cond_proj_in.fullmatch(original_key):
            regex_match = re_prior_cond_proj_in.match(original_key)
            groups = regex_match.groups()
            re_new_key = f"conditioner_blocks.upsampler.proj_in.{groups[-1]}"
            key = re_prior_cond_proj_in.sub(re_new_key, original_key)

        # keep original key
        else:
            key = original_key

        key = replace_key(key)

        if f"{key_prefix}.{key}" not in model_state_dict or key is None:
            print(f"failed converting {original_key} to {key}, does not match")

        # handle mismatched shape
        elif value.shape != model_state_dict[f"{key_prefix}.{key}"].shape:
            val = model_state_dict[f"{key_prefix}.{key}"]
            print(f"{original_key}-> {key} : \nshape {val.shape} and { value.shape}, do not match")
            key = original_key

        mapping[key] = original_key
        new_dict[key] = value

    return new_dict


@torch.no_grad()
def convert_openai_checkpoint(model_name=None, pytorch_dump_folder_path=None):
    """
    Copy/paste/tweak model's weights to our Jukebox structure.
    """
    # Download any missing original checkpoint shards.
    for file in MODEL_MAPPING[model_name]:
        if not os.path.isfile(f"{pytorch_dump_folder_path}/{file.split('/')[-1]}"):
            r = requests.get(f"{PREFIX}{file}", allow_redirects=True)
            os.makedirs(f"{pytorch_dump_folder_path}/", exist_ok=True)
            # FIX: use a context manager so the file handle is always closed
            # (the previous `open(...).write(...)` leaked the handle).
            with open(f"{pytorch_dump_folder_path}/{file.split('/')[-1]}", "wb") as ckpt_file:
                ckpt_file.write(r.content)

    model_to_convert = MODEL_MAPPING[model_name.split("/")[-1]]

    config = JukeboxConfig.from_pretrained(model_name)
    model = JukeboxModel(config)

    weight_dict = []
    mapping = {}
    for i, dict_name in enumerate(model_to_convert):
        old_dic = torch.load(f"{pytorch_dump_folder_path}/{dict_name.split('/')[-1]}")["model"]

        new_dic = {}
        for k in old_dic.keys():
            if k.endswith(".b"):
                # FIX: `k.replace("b", "bias")` rewrote *every* "b" in the key
                # (e.g. "blocks.b" -> "biaslocks.bias"); only expand the trailing
                # ".b" suffix.
                new_dic[k[:-1] + "bias"] = old_dic[k]
            elif k.endswith(".w"):
                # Same fix for the ".w" -> ".weight" suffix.
                new_dic[k[:-1] + "weight"] = old_dic[k]
            elif "level_2" not in dict_name and "cond.model." in k:
                new_dic[k.replace(".blocks.", ".model.")] = old_dic[k]
            else:
                new_dic[k] = old_dic[k]

        # Checkpoint 0 is the VQVAE; the remaining ones are priors, stored top-down.
        key_prefix = "vqvae" if i == 0 else f"priors.{3 - i}"
        new_dic = fix_jukebox_keys(new_dic, model.state_dict(), key_prefix, mapping)
        weight_dict.append(new_dic)

    vqvae_state_dict = weight_dict.pop(0)
    model.vqvae.load_state_dict(vqvae_state_dict)
    for i in range(len(weight_dict)):
        # The priors were converted top-down but are stored bottom-up in the model.
        model.priors[i].load_state_dict(weight_dict[2 - i])

    Path(pytorch_dump_folder_path).mkdir(exist_ok=True)
    with open(f"{pytorch_dump_folder_path}/mapping.json", "w") as txtfile:
        json.dump(mapping, txtfile)

    print(f"Saving model {model_name} to {pytorch_dump_folder_path}")
    model.save_pretrained(pytorch_dump_folder_path)

    return weight_dict


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument(
        "--model_name",
        default="jukebox-5b-lyrics",
        type=str,
        help="Name of the model you'd like to convert.",
    )
    parser.add_argument(
        "--pytorch_dump_folder_path",
        default="jukebox-5b-lyrics-converted",
        type=str,
        help="Path to the output PyTorch model directory.",
    )
    args = parser.parse_args()
    convert_openai_checkpoint(args.model_name, args.pytorch_dump_folder_path)
27182812/ChatGLM-LLaMA-chinese-insturct
18,022
src/transformers/models/jukebox/tokenization_jukebox.py
# coding=utf-8 # Copyright 2022 The Open AI Team Authors and The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tokenization classes for OpenAI Jukebox.""" import json import os import re import unicodedata from json.encoder import INFINITY from typing import Any, Dict, List, Optional, Tuple, Union import numpy as np import regex from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...tokenization_utils_base import BatchEncoding from ...utils import TensorType, is_flax_available, is_tf_available, is_torch_available, logging from ...utils.generic import _is_jax, _is_numpy logger = logging.get_logger(__name__) VOCAB_FILES_NAMES = { "artists_file": "artists.json", "lyrics_file": "lyrics.json", "genres_file": "genres.json", } PRETRAINED_VOCAB_FILES_MAP = { "artists_file": { "jukebox": "https://huggingface.co/ArthurZ/jukebox/blob/main/artists.json", }, "genres_file": { "jukebox": "https://huggingface.co/ArthurZ/jukebox/blob/main/genres.json", }, "lyrics_file": { "jukebox": "https://huggingface.co/ArthurZ/jukebox/blob/main/lyrics.json", }, } PRETRAINED_LYRIC_TOKENS_SIZES = { "jukebox": 512, } class JukeboxTokenizer(PreTrainedTokenizer): """ Constructs a Jukebox tokenizer. Jukebox can be conditioned on 3 different inputs : - Artists, unique ids are associated to each artist from the provided dictionary. - Genres, unique ids are associated to each genre from the provided dictionary. - Lyrics, character based tokenization. 
Must be initialized with the list of characters that are inside the vocabulary. This tokenizer does not require training. It should be able to process a different number of inputs: as the conditioning of the model can be done on the three different queries. If None is provided, defaults values will be used.: Depending on the number of genres on which the model should be conditioned (`n_genres`). ``` >>> from transformers import JukeboxTokenizer >>> tokenizer = JukeboxTokenizer.from_pretrained("openai/jukebox-1b-lyrics") >>> tokenizer("Alan Jackson", "Country Rock", "old town road")['input_ids'] [tensor([[ 0, 0, 0, 6785, 546, 41, 38, 30, 76, 46, 41, 49, 40, 76, 44, 41, 27, 30]]), tensor([[ 0, 0, 0, 145, 0]]), tensor([[ 0, 0, 0, 145, 0]])] ``` You can get around that behavior by passing `add_prefix_space=True` when instantiating this tokenizer or when you call it on some text, but since the model was not pretrained this way, it might yield a decrease in performance. <Tip> If nothing is provided, the genres and the artist will either be selected randomly or set to None </Tip> This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to: this superclass for more information regarding those methods. However the code does not allow that and only supports composing from various genres. Args: artists_file (`str`): Path to the vocabulary file which contains a mapping between artists and ids. The default file supports both "v2" and "v3" genres_file (`str`): Path to the vocabulary file which contain a mapping between genres and ids. lyrics_file (`str`): Path to the vocabulary file which contains the accepted characters for the lyrics tokenization. version (`List[str]`, `optional`, default to `["v3", "v2", "v2"]`) : List of the tokenizer versions. The `5b-lyrics`'s top level prior model was trained using `v3` instead of `v2`. n_genres (`int`, `optional`, defaults to 1): Maximum number of genres to use for composition. 
max_n_lyric_tokens (`int`, `optional`, defaults to 512): Maximum number of lyric tokens to keep. unk_token (`str`, *optional*, defaults to `"<|endoftext|>"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. """ vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP max_lyric_input_size = PRETRAINED_LYRIC_TOKENS_SIZES model_input_names = ["input_ids", "attention_mask"] def __init__( self, artists_file, genres_file, lyrics_file, version=["v3", "v2", "v2"], max_n_lyric_tokens=512, n_genres=5, unk_token="<|endoftext|>", **kwargs, ): unk_token = AddedToken(unk_token, lstrip=False, rstrip=False) if isinstance(unk_token, str) else unk_token super().__init__( unk_token=unk_token, n_genres=n_genres, version=version, max_n_lyric_tokens=max_n_lyric_tokens, **kwargs, ) self.version = version self.max_n_lyric_tokens = max_n_lyric_tokens self.n_genres = n_genres with open(artists_file, encoding="utf-8") as vocab_handle: self.artists_encoder = json.load(vocab_handle) with open(genres_file, encoding="utf-8") as vocab_handle: self.genres_encoder = json.load(vocab_handle) with open(lyrics_file, encoding="utf-8") as vocab_handle: self.lyrics_encoder = json.load(vocab_handle) oov = "[^A-Za-z0-9.,:;!?\-'\"()\[\] \t\n]+" # In v2, we had a n_vocab=80 and in v3 we missed + and so n_vocab=79 of characters. 
if len(self.lyrics_encoder) == 79: oov = oov.replace("\-'", "\-+'") self.out_of_vocab = regex.compile(oov) self.artists_decoder = {v: k for k, v in self.artists_encoder.items()} self.genres_decoder = {v: k for k, v in self.genres_encoder.items()} self.lyrics_decoder = {v: k for k, v in self.lyrics_encoder.items()} @property def vocab_size(self): return len(self.artists_encoder) + len(self.genres_encoder) + len(self.lyrics_encoder) def get_vocab(self): return dict(self.artists_encoder, self.genres_encoder, self.lyrics_encoder) def _convert_token_to_id(self, list_artists, list_genres, list_lyrics): """Converts the artist, genre and lyrics tokens to their index using the vocabulary. The total_length, offset and duration have to be provided in order to select relevant lyrics and add padding to the lyrics token sequence. """ artists_id = [self.artists_encoder.get(artist, 0) for artist in list_artists] for genres in range(len(list_genres)): list_genres[genres] = [self.genres_encoder.get(genre, 0) for genre in list_genres[genres]] list_genres[genres] = list_genres[genres] + [-1] * (self.n_genres - len(list_genres[genres])) lyric_ids = [[self.lyrics_encoder.get(character, 0) for character in list_lyrics[0]], [], []] return artists_id, list_genres, lyric_ids def _tokenize(self, lyrics): """ Converts a string in a sequence of tokens (string), using the tokenizer. Split in words for word-based vocabulary or sub-words for sub-word-based vocabularies (BPE/SentencePieces/WordPieces). Do NOT take care of added tokens. Only the lyrics are split into character for the character-based vocabulary. 
""" # only lyrics are not tokenized, but character based is easily handled return list(lyrics) def tokenize(self, artist, genre, lyrics, **kwargs): """ Converts three strings in a 3 sequence of tokens using the tokenizer """ artist, genre, lyrics = self.prepare_for_tokenization(artist, genre, lyrics) lyrics = self._tokenize(lyrics) return artist, genre, lyrics def prepare_for_tokenization( self, artists: str, genres: str, lyrics: str, is_split_into_words: bool = False ) -> Tuple[str, str, str, Dict[str, Any]]: """ Performs any necessary transformations before tokenization. This method should pop the arguments from kwargs and return the remaining `kwargs` as well. We test the `kwargs` at the end of the encoding process to be sure all the arguments have been used. Args: artist (`str`): The artist name to prepare. This will mostly lower the string genres (`str`): The genre name to prepare. This will mostly lower the string. lyrics (`str`): The lyrics to prepare. is_split_into_words (`bool`, *optional*, defaults to `False`): Whether or not the input is already pre-tokenized (e.g., split into words). If set to `True`, the tokenizer assumes the input is already split into words (for instance, by splitting it on whitespace) which it will tokenize. This is useful for NER or token classification. kwargs: Keyword arguments to use for the tokenization. 
""" for idx in range(len(self.version)): if self.version[idx] == "v3": artists[idx] = artists[idx].lower() genres[idx] = [genres[idx].lower()] else: artists[idx] = self._normalize(artists[idx]) + ".v2" genres[idx] = [ self._normalize(genre) + ".v2" for genre in genres[idx].split("_") ] # split is for the full dictionary with combined genres if self.version[0] == "v2": self.out_of_vocab = regex.compile("[^A-Za-z0-9.,:;!?\-'\"()\[\] \t\n]+") vocab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789.,:;!?-+'\"()[] \t\n" self.vocab = {vocab[index]: index + 1 for index in range(len(vocab))} self.vocab["<unk>"] = 0 self.n_vocab = len(vocab) + 1 self.lyrics_encoder = self.vocab self.lyrics_decoder = {v: k for k, v in self.vocab.items()} self.lyrics_decoder[0] = "" else: self.out_of_vocab = regex.compile("[^A-Za-z0-9.,:;!?\-+'\"()\[\] \t\n]+") lyrics = self._run_strip_accents(lyrics) lyrics = lyrics.replace("\\", "\n") lyrics = self.out_of_vocab.sub("", lyrics), [], [] return artists, genres, lyrics def _run_strip_accents(self, text): """Strips accents from a piece of text.""" text = unicodedata.normalize("NFD", text) output = [] for char in text: cat = unicodedata.category(char) if cat == "Mn": continue output.append(char) return "".join(output) def _normalize(self, text: str) -> str: """ Normalizes the input text. 
This process is for the genres and the artist Args: text (`str`): Artist or Genre string to normalize """ accepted = ( [chr(i) for i in range(ord("a"), ord("z") + 1)] + [chr(i) for i in range(ord("A"), ord("Z") + 1)] + [chr(i) for i in range(ord("0"), ord("9") + 1)] + ["."] ) accepted = frozenset(accepted) pattern = re.compile(r"_+") text = "".join([c if c in accepted else "_" for c in text.lower()]) text = pattern.sub("_", text).strip("_") return text def convert_lyric_tokens_to_string(self, lyrics: List[str]) -> str: return " ".join(lyrics) def convert_to_tensors( self, inputs, tensor_type: Optional[Union[str, TensorType]] = None, prepend_batch_axis: bool = False ): """ Convert the inner content to tensors. Args: tensor_type (`str` or [`~utils.TensorType`], *optional*): The type of tensors to use. If `str`, should be one of the values of the enum [`~utils.TensorType`]. If unset, no modification is done. prepend_batch_axis (`int`, *optional*, defaults to `False`): Whether or not to add the batch dimension during the conversion. """ # Convert to TensorType if not isinstance(tensor_type, TensorType): tensor_type = TensorType(tensor_type) # Get a function reference for the correct framework if tensor_type == TensorType.TENSORFLOW: if not is_tf_available(): raise ImportError( "Unable to convert output to TensorFlow tensors format, TensorFlow is not installed." 
) import tensorflow as tf as_tensor = tf.constant is_tensor = tf.is_tensor elif tensor_type == TensorType.PYTORCH: if not is_torch_available(): raise ImportError("Unable to convert output to PyTorch tensors format, PyTorch is not installed.") import torch as_tensor = torch.tensor is_tensor = torch.is_tensor elif tensor_type == TensorType.JAX: if not is_flax_available(): raise ImportError("Unable to convert output to JAX tensors format, JAX is not installed.") import jax.numpy as jnp # noqa: F811 as_tensor = jnp.array is_tensor = _is_jax else: as_tensor = np.asarray is_tensor = _is_numpy # Do the tensor conversion in batch try: if prepend_batch_axis: inputs = [inputs] if not is_tensor(inputs): inputs = as_tensor(inputs) except: # noqa E722 raise ValueError( "Unable to create tensor, you should probably activate truncation and/or padding " "with 'padding=True' 'truncation=True' to have batched tensors with the same length." ) return inputs def __call__(self, artist, genres, lyrics="", return_tensors="pt") -> BatchEncoding: """Convert the raw string to a list of token ids Args: artist (`str`): Name of the artist. 
genres (`str`): List of genres that will be mixed to condition the audio lyrics (`str`, *optional*, defaults to `""`): Lyrics used to condition the generation """ input_ids = [0, 0, 0] artist = [artist] * len(self.version) genres = [genres] * len(self.version) artists_tokens, genres_tokens, lyrics_tokens = self.tokenize(artist, genres, lyrics) artists_id, genres_ids, full_tokens = self._convert_token_to_id(artists_tokens, genres_tokens, lyrics_tokens) attention_masks = [-INFINITY] * len(full_tokens[-1]) input_ids = [ self.convert_to_tensors( [input_ids + [artists_id[i]] + genres_ids[i] + full_tokens[i]], tensor_type=return_tensors ) for i in range(len(self.version)) ] return BatchEncoding({"input_ids": input_ids, "attention_masks": attention_masks}) def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: """ Saves the tokenizer's vocabulary dictionary to the provided save_directory. Args: save_directory (`str`): A path to the directory where to saved. It will be created if it doesn't exist. filename_prefix (`Optional[str]`, *optional*): A prefix to add to the names of the files saved by the tokenizer. 
""" if not os.path.isdir(save_directory): logger.error(f"Vocabulary path ({save_directory}) should be a directory") return artists_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["artists_file"] ) with open(artists_file, "w", encoding="utf-8") as f: f.write(json.dumps(self.artists_encoder, ensure_ascii=False)) genres_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["genres_file"] ) with open(genres_file, "w", encoding="utf-8") as f: f.write(json.dumps(self.genres_encoder, ensure_ascii=False)) lyrics_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["lyrics_file"] ) with open(lyrics_file, "w", encoding="utf-8") as f: f.write(json.dumps(self.lyrics_encoder, ensure_ascii=False)) return (artists_file, genres_file, lyrics_file) def _convert_id_to_token(self, artists_index, genres_index, lyric_index): """ Converts an index (integer) in a token (str) using the vocab. Args: artists_index (`int`): Index of the artist in its corresponding dictionary. genres_index (`Union[List[int], int]`): Index of the genre in its corresponding dictionary. lyric_index (`List[int]`): List of character indices, which each correspond to a character. """ artist = self.artists_decoder.get(artists_index) genres = [self.genres_decoder.get(genre) for genre in genres_index] lyrics = [self.lyrics_decoder.get(character) for character in lyric_index] return artist, genres, lyrics
2833844911/cy_jsvmp
484,047
outsrc/out3.js
var constantPool=["_regeneratorRuntime","exports","awcbb_yhh_fun2","Object","prototype","Op","hasOwnProperty","hasOwn","defineProperty","obj","key","desc","value","awcbb_yhh_fun3","Symbol","function","$Symbol","iterator","@@iterator","iteratorSymbol","asyncIterator","@@asyncIterator","asyncIteratorSymbol","toStringTag","@@toStringTag","toStringTagSymbol","enumerable",0,"configurable","writable","","define","err","awcbb_yhh_fun5","innerFn","outerFn","self","tryLocsList","Generator","protoGenerator","create","generator","Context","context","_invoke","makeInvokeMethod","fn","arg","type","normal","call","throw","wrap","ContinueSentinel","IteratorPrototype","awcbb_yhh_fun11","getPrototypeOf","getProto","values","NativeIteratorPrototype","GeneratorFunctionPrototype","Gp","method","awcbb_yhh_fun14","awcbb_yhh_fun13","next","return","forEach","PromiseImpl","resolve","reject","tryCatch","record","result","object","__await","unwrapped","awcbb_yhh_fun17","error","invoke","awcbb_yhh_fun18","then","awcbb_yhh_fun19","awcbb_yhh_fun20","awcbb_yhh_fun23","previousPromise","callInvokeWithMethodAndArg","awcbb_yhh_fun21","suspendedStart","state","executing","Generator is already running","Error","completed","doneResult",null,!0,"delegate","maybeInvokeDelegate","delegateResult","abrupt","dispatchException","sent","_sent","done","suspendedYield","awcbb_yhh_fun25","methodName","undefined","' method","The iterator does not provide a '","TypeError","info","iterator result is not an 
object","resultName","nextLoc","locs","tryLoc","entry",1,"catchLoc",2,"finallyLoc","afterLoc",3,"tryEntries","push","completion","root","pushTryEntry","reset","iterable","iteratorMethod","length","isNaN","i","awcbb_yhh_fun31","GeneratorFunction","constructor","displayName","isGeneratorFunction","genFun","ctor","name","awcbb_yhh_fun33","mark","setPrototypeOf","__proto__","awcbb_yhh_fun34","awrap","awcbb_yhh_fun35","AsyncIterator","defineIteratorMethods","awcbb_yhh_fun36","async","Promise","iter","awcbb_yhh_fun38","awcbb_yhh_fun37","awcbb_yhh_fun39","toString","[object Generator]","awcbb_yhh_fun40","keys","val","for_in_xh_cbb","for_in_xh_cbb_list","reverse","pop","awcbb_yhh_fun42","awcbb_yhh_fun41","skipTempReset","prev","resetTryEntry","charAt","t","slice","awcbb_yhh_fun43","stop","rootRecord","rval","awcbb_yhh_fun44","exception","loc","caught","end","handle","hasCatch","hasFinally","try statement without catch or finally","awcbb_yhh_fun45","finallyEntry","break","continue","complete","awcbb_yhh_fun47","awcbb_yhh_fun48","finish","awcbb_yhh_fun49","catch","thrown","illegal catch 
attempt","awcbb_yhh_fun50","delegateYield","awcbb_yhh_fun51","gen","_next","_throw","arguments","args","apply","asyncGeneratorStep","awcbb_yhh_fun55","awcbb_yhh_fun54","link","document","createElement","relList","e","supports","modulepreload","cbbiyhh",'link[rel="modulepreload"]',"querySelectorAll","a","o","childList","subtree","addedNodes","d","LINK","tagName","rel","awcbb_yhh_fun59","MutationObserver","observe","integrity","referrerpolicy","referrerPolicy","use-credentials","crossorigin","anonymous","credentials","same-origin","omit","include","ep","n","href","fetch","awcbb_yhh_fun58","L","init","mqtt_client","connected","clientId","clean","reconnectPeriod",100,"username","openai","password","ed076287532e86365e841e92bfc50d8c","wss://mqtt.suanzisu.com/mqtt","J","connect","setMqttClient","commit","awcbb_yhh_fun63","on","qos","subscribe","awcbb_yhh_fun64","message","f","JSON","parse","p","ut","awcbb_yhh_fun65","awcbb_yhh_fun62",5002,"code","token","s","store","rm","login_data","/login","$","func",200,"data","dispatch","msg","duration",5e3,"k","afterReload","success","awcbb_yhh_fun66","timeAgo","Date","getTime",31,24,60,1e3,"刚刚","未来","分钟前","小时前","天前","getFullYear",4,"digit","getMonth","getDate","getHours","getMinutes","getSeconds",":","join"," ","-","awcbb_yhh_fun67","String","0",10,"Math","pow","awcbb_yhh_fun68","formatPrice","[^\\d.]","g","replace","\\.{2,}",".","$#$","\\.","^(\\-)*(\\d+)\\.(\\d\\d).*$","$1$2.$3","indexOf","parseFloat","awcbb_yhh_fun69","formatDate","awcbb_yhh_fun70","init_mqtt","mqtt_id","get","openai_topic/","openai_cid/","set","awcbb_yhh_fun72","/index/mqtt_id.htm","y","awcbb_yhh_fun71","formatSecond","0 秒","parseInt","秒","分","小时","awcbb_yhh_fun73","formatKb","0 B",1024,"B","KB","MB","GB","TB","PB","EB","ZB","YB","log","floor","toFixed","awcbb_yhh_fun74","toDateString","string","awcbb_yhh_fun76",'Invalid Msec for "util.toDateString(Msec)"',"hint","yyyy-MM-dd 
HH:mm:ss","ss","mm","HH","dd","MM","yyyy","awcbb_yhh_fun75","isPhoneNo","^1[3456789]\\d{9}$","test","awcbb_yhh_fun77","onShow","visibilitychange","visibilityState","awcbb_yhh_fun79","addEventListener","awcbb_yhh_fun78","getNoticeTxt","_txt","now","_time","notice_txt","txt","awcbb_yhh_fun81","_txt.htm","/index/","awcbb_yhh_fun80","randomStr","openai-",16,1e7,"random","substr","awcbb_yhh_fun82","awcbb_yhh_fun83","nextMonths","yyyy-MM-dd","split","_t$split",13,"b","awcbb_yhh_fun84","randerListForCreateTime","Array","create_time","yyyy-MM-dd HH:mm","last_active_time","--","confirm_time","end_time","_end_time_txt"," [已过期]","_end_time","awcbb_yhh_fun86","map","awcbb_yhh_fun85","myear","awcbb_yhh_fun87","localStorage","getItem","_unused","awcbb_yhh_fun88","stringify","setItem","awcbb_yhh_fun89","clear","removeItem","awcbb_yhh_fun90","getAfterMons","setMonth","awcbb_yhh_fun91","afterDays","abs",3600,"awcbb_yhh_fun92","__vccOpts","console","awcbb_yhh_fun93","K","data-v-a13a70b9","W","Z","awcbb_yhh_fun94","mt","class","msg-box","_t","text-wrapper","pt","flex","ft","span","spin","m","awcbb_yhh_fun95","gt","ml10","xt","flex f12","ht","innerHTML","yt","button-input mt10","bt","__name","home","setup","P","C","input","postTxt","chat","awcbb_yhh_fun97","notice","awcbb_yhh_fun98","D","H","j","T","N","A","r","_","style","padding-top","default","div","z","v","openai_txt","c","border-bottom f14","U","x","awcbb_yhh_fun103","l",1032,"awcbb_yhh_fun102","V",128,"I","txtBtn","loading","E","flex f12 mt10 p20 txt-left flex-column bg-ff","xs",20,"sm","md",8,"input_loading","input_box_txt","miao","awcbb_yhh_fun104","mt20","flex txt-left flex-j-center f12 color-6f","awcbb_yhh_fun105","awcbb_yhh_fun101","u","modelValue","onUpdate:modelValue","awcbb_yhh_fun106","autosize","size","large","placeholder","请输入(输入 清空 
将清空对话内容)","maxlength","240","clearable","disabled","minlength","onKeyup","enter","O","append","onClick","disa","w","awcbb_yhh_fun108","awcbb_yhh_fun107","awcbb_yhh_fun100","awcbb_yhh_fun99","awcbb_yhh_fun96","vt","__scopeId","wt","flex p20 login-box flex-column","$t","flex flex-i-center flex-column f14 color-6f","Bt","flex flex-column pay_qr","Mt","flex mt40","kt","It","flex flex-j-center flex-i-center","Tt","flex flex-j-center mt20 flex-i-center f14","St","login","phone","btn","登 陆","code_btn","获取","get_code","timer","num","account_type","account_type_txt","tips","total_fee","pay_qr","pay_ment","initNavi","awcbb_yhh_fun110",11,"手机格式错误!","验证码不正确!","提交中","awcbb_yhh_fun112","awcbb_yhh_fun113","/index/login.htm","post","awcbb_yhh_fun111","获取中","awcbb_yhh_fun115","clearInterval","awcbb_yhh_fun117","setInterval","验证码已发送,请注意查收","awcbb_yhh_fun116","/index/get_code.htm??phone=","awcbb_yhh_fun114","/home","setBtn","awcbb_yhh_fun118","login_txt","退出成功","awcbb_yhh_fun121","/index/logout.htm","awcbb_yhh_fun120","确定退出吗","confirmButtonText","确认","cancelButtonText","取消","h","confirm","awcbb_yhh_fun119","pc","wap","img","M","body","appendChild","forms","submit","awcbb_yhh_fun123","&pay_ment=","/pay.htm?m=","awcbb_yhh_fun122","awcbb_yhh_fun125","/index/login_txt.htm","awcbb_yhh_fun124","(phone|pad|pod|iPhone|iPod|ios|iPad|Android|Mobile|BlackBerry|IEMobile|MQQBrowser|JUC|Fennec|wOSBrowser|BrowserNG|WebOS|Symbian|Windows Phone)","navigator","userAgent","match","awcbb_yhh_fun126","X","S","tt","R","F","Q","G","flex-i-center flex-j-center","label-width","80px","label","手机号","q","awcbb_yhh_fun131","awcbb_yhh_fun130","验证码","awcbb_yhh_fun133","awcbb_yhh_fun135","awcbb_yhh_fun134","awcbb_yhh_fun132","primary","awcbb_yhh_fun137","awcbb_yhh_fun136","awcbb_yhh_fun129","扫码支付","alipay","微信","支付宝","请使用","mt10","src","当前账户类型:","flex 
txt-left","h3","元】","升级账户需要【","awcbb_yhh_fun138","awcbb_yhh_fun139","wxpay","awcbb_yhh_fun140","awcbb_yhh_fun141","text","退出登陆","awcbb_yhh_fun142","awcbb_yhh_fun128","awcbb_yhh_fun127","awcbb_yhh_fun109","qt","data-v-8787f27d","Dt","flex flex-j-center flex-i-center w100b h100v","Ct","/","awcbb_yhh_fun144","et","icon","title","发生错误","extra","返回主页","awcbb_yhh_fun147","awcbb_yhh_fun146","awcbb_yhh_fun145","awcbb_yhh_fun143","Nt","At","Ot","Et","Lt","Ht","awcbb_yhh_fun149","flex flex-column","$store","input_img","awcbb_yhh_fun152","要找什么样的图片","postImg","awcbb_yhh_fun153","awcbb_yhh_fun155","awcbb_yhh_fun156","awcbb_yhh_fun154","openai_img","f12 txt-left color-6f","p20","awcbb_yhh_fun158","awcbb_yhh_fun159","awcbb_yhh_fun157","gutter",12,"lg",6,"xl","id","img_url","fit","fill","preview-src-list","openai_img_arr","initial-index","awcbb_yhh_fun162","awcbb_yhh_fun161","mt40 flex-i-center flex-j-center","clearImg","awcbb_yhh_fun164","清空","awcbb_yhh_fun165","awcbb_yhh_fun163","awcbb_yhh_fun160","awcbb_yhh_fun151","awcbb_yhh_fun150","awcbb_yhh_fun148","jt","Vt","zt","Pt","Kt","Ut","flex f12 color-6f","Ft","search","awcbb_yhh_fun167","input_search","awcbb_yhh_fun170","请输入描述","awcbb_yhh_fun171","awcbb_yhh_fun173","awcbb_yhh_fun174","awcbb_yhh_fun172","search_result","awcbb_yhh_fun176","awcbb_yhh_fun177","awcbb_yhh_fun175","flex-i-center flex-j-center f14 txt-left mt20","awcbb_yhh_fun179","awcbb_yhh_fun178","awcbb_yhh_fun169","awcbb_yhh_fun168","awcbb_yhh_fun166","Yt","path","redirect","meta","auth","show","主页","component","/img","/search","/:catchAll(.*)","Rt","history","at","routes","nt","awcbb_yhh_fun180","beforeEach","Y","defaults","timeout",120,"baseURL","_context","headers","x-token","x-mqtt-id","content-type","application/json","multipart/form-data;charset=UTF-8","application/x-www-form-urlencoded; charset=UTF-8","ot","-----BEGIN PUBLIC 
KEY-----\nMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDUG4vL94YTeyhb9vSlCRTpQf+WhPZD2pGz4x6XlqK9sVA6vfWU014BUqJYpu7d0a0/7JkW9jQvrVjSpSUN7gZqc7c1p84dlAQf3h781vof8gZVso62yOfM8Jjebq1g8lnIa10p7mWH2qdesYZ7NDH4bl0N9DhAY8+jYnuAmx5C1QIDAQAB\n-----END PUBLIC KEY-----","st","setPublicKey","x-request-by","web","x-req-pwd","web|","encrypt",null,271,281,"awcbb_yhh_fun182","_callee","awcbb_yhh_fun181","_asyncToGenerator","请求超时,请检查是否已连网","alert","awcbb_yhh_fun183","interceptors","request","use","head","config","status",5109,5116,5117,5118,5119,5120,"yes","is_open_modal","awcbb_yhh_fun185","setTimeout",5107,"操作失败!","操作失败!请刷新页面","showClose","callback","window","location","reload","awcbb_yhh_fun186","awcbb_yhh_fun187","go","awcbb_yhh_fun188","awcbb_yhh_fun189","登陆状态已失效,请重新登陆","awcbb_yhh_fun190","awcbb_yhh_fun184","response","awcbb_yhh_fun191","w100b flex flex-j-center","Qt","flex flex-j-center navi","Gt","App","awcbb_yhh_fun194","navi_list","filter","is","awcbb_yhh_fun193","pathname","substring","visible","awcbb_yhh_fun196","awcbb_yhh_fun195","router-view","lt","it","flex-direction","column","bg-f","navi-item","awcbb_yhh_fun200","awcbb_yhh_fun201","awcbb_yhh_fun199","awcbb_yhh_fun198","awcbb_yhh_fun197","awcbb_yhh_fun192","Jt","data-v-2296b3a7","Wt","提交","不好意思有点慢","我还在酝酿中","别急,我没死机,再等下","因为咱用的别人家免费的接口,慢是正常的","再次抱歉,我还在运行中","没办法,也不想这么慢","idx","假装思考中","all_times","对话模式","搜索模式","找图","登录","mutations","setOpenaiTxt","awcbb_yhh_fun202","setOpenaiSearch","awcbb_yhh_fun203","awcbb_yhh_fun204","clearOpenaiList","awcbb_yhh_fun205","setOpenaiImg","awcbb_yhh_fun206","setOpenaiImgArr","awcbb_yhh_fun207","正在努力生成中","awcbb_yhh_fun208","awcbb_yhh_fun209","awcbb_yhh_fun210","awcbb_yhh_fun211","actions","_context2","程序出现故障,请刷新页面后重试",23,"input_loading_func","。","awcbb_yhh_fun215","trim","《","/text.htm","org_txt","_i","t0",66,474,535,573,583,"awcbb_yhh_fun214","_callee2","awcbb_yhh_fun213","awcbb_yhh_fun212","awcbb_yhh_fun217","&size=1024","/img.htm?msg=","awcbb_yhh_fun216","reloadOpenaiText","_context3","req_i
d","/text/txt.htm?id=","&#039;","'","&quot;",'"',"&gt;",">","&lt;","<","&(?!#?\\w+;)","&amp;","awcbb_yhh_fun221","发生错误,请稍后重试","req_type",29,158,168,"awcbb_yhh_fun220","_callee3","awcbb_yhh_fun219","awcbb_yhh_fun218","reloadOpenaiImg","_context4","/img/img.htm?id=",26,176,186,"awcbb_yhh_fun224","_callee4","awcbb_yhh_fun223","awcbb_yhh_fun222",5,"tpye","Warning","要不还是算了,第三方接口没有返回,我也没辙","dt","awcbb_yhh_fun226","已等待 ","。等待时间太长,建议刷新页面重新提交","awcbb_yhh_fun227","awcbb_yhh_fun225","tradeSuccess","_context5","/pay/is_pay.htm?id=","awcbb_yhh_fun231",142,152,"awcbb_yhh_fun230","_callee5","awcbb_yhh_fun229","awcbb_yhh_fun228","modules","rt","Zt","#app","ct","mount"],changlc={awcbb_yhh_fun0:{variablePool:{_regeneratorRuntime:"awcbb_yhh_fun1",asyncGeneratorStep:"awcbb_yhh_fun52",_asyncToGenerator:"awcbb_yhh_fun53",awcbb_yhh_fun58:"awcbb_yhh_fun58",L:null,awcbb_yhh_fun62:"awcbb_yhh_fun62",ut:null,awcbb_yhh_fun66:"awcbb_yhh_fun66",s:null,awcbb_yhh_fun67:"awcbb_yhh_fun67",awcbb_yhh_fun68:"awcbb_yhh_fun68",awcbb_yhh_fun69:"awcbb_yhh_fun69",awcbb_yhh_fun70:"awcbb_yhh_fun70",awcbb_yhh_fun71:"awcbb_yhh_fun71",awcbb_yhh_fun73:"awcbb_yhh_fun73",awcbb_yhh_fun74:"awcbb_yhh_fun74",awcbb_yhh_fun75:"awcbb_yhh_fun75",awcbb_yhh_fun77:"awcbb_yhh_fun77",awcbb_yhh_fun78:"awcbb_yhh_fun78",awcbb_yhh_fun80:"awcbb_yhh_fun80",awcbb_yhh_fun82:"awcbb_yhh_fun82",awcbb_yhh_fun83:"awcbb_yhh_fun83",awcbb_yhh_fun84:"awcbb_yhh_fun84",awcbb_yhh_fun85:"awcbb_yhh_fun85",awcbb_yhh_fun87:"awcbb_yhh_fun87",awcbb_yhh_fun88:"awcbb_yhh_fun88",awcbb_yhh_fun89:"awcbb_yhh_fun89",awcbb_yhh_fun90:"awcbb_yhh_fun90",awcbb_yhh_fun91:"awcbb_yhh_fun91",awcbb_yhh_fun92:"awcbb_yhh_fun92",K:null,awcbb_yhh_fun93:"awcbb_yhh_fun93",mt:null,awcbb_yhh_fun94:"awcbb_yhh_fun94",_t:null,pt:null,ft:null,gt:null,awcbb_yhh_fun95:"awcbb_yhh_fun95",xt:null,ht:null,yt:null,bt:null,vt:null,awcbb_yhh_fun96:"awcbb_yhh_fun96",wt:null,$t:null,Bt:null,Mt:null,kt:null,It:null,Tt:null,St:null,qt:null,awcbb_yhh_fun109:"awcbb_yhh_fun109",Dt:null,Ct:null,Nt:n
ull,awcbb_yhh_fun143:"awcbb_yhh_fun143",At:null,Ot:null,Et:null,Lt:null,Ht:null,jt:null,awcbb_yhh_fun148:"awcbb_yhh_fun148",Vt:null,zt:null,Pt:null,Kt:null,Ut:null,Ft:null,Yt:null,awcbb_yhh_fun166:"awcbb_yhh_fun166",Rt:null,$:null,awcbb_yhh_fun180:"awcbb_yhh_fun180",y:null,awcbb_yhh_fun181:"awcbb_yhh_fun181",awcbb_yhh_fun183:"awcbb_yhh_fun183",awcbb_yhh_fun184:"awcbb_yhh_fun184",awcbb_yhh_fun191:"awcbb_yhh_fun191",Qt:null,Gt:null,Jt:null,awcbb_yhh_fun192:"awcbb_yhh_fun192",Wt:null,Zt:null,awcbb_yhh_fun202:"awcbb_yhh_fun202",awcbb_yhh_fun203:"awcbb_yhh_fun203",awcbb_yhh_fun204:"awcbb_yhh_fun204",awcbb_yhh_fun205:"awcbb_yhh_fun205",awcbb_yhh_fun206:"awcbb_yhh_fun206",awcbb_yhh_fun207:"awcbb_yhh_fun207",awcbb_yhh_fun208:"awcbb_yhh_fun208",awcbb_yhh_fun209:"awcbb_yhh_fun209",awcbb_yhh_fun210:"awcbb_yhh_fun210",awcbb_yhh_fun211:"awcbb_yhh_fun211",awcbb_yhh_fun212:"awcbb_yhh_fun212",awcbb_yhh_fun216:"awcbb_yhh_fun216",awcbb_yhh_fun218:"awcbb_yhh_fun218",awcbb_yhh_fun222:"awcbb_yhh_fun222",awcbb_yhh_fun225:"awcbb_yhh_fun225",awcbb_yhh_fun228:"awcbb_yhh_fun228"},zhili:[74112,0,38959,211,67404,841,64954,250,23185,280,74112,300,43071,325,32545,331,9820,344,57979,346,26653,356,6713,363,58361,379,95645,392,74112,396,74112,402,26653,412,41646,419,38959,420,64954,427,74112,441,38959,443,57979,447,41646,450,43071,453,74112,456,9820,460,7823,463,67404,468,18922,480,15774,569,59830,703,60605,720,43071,767,78762,794,95311,811,7823,840,64529,844,74112,876,44194,878,97059,906,80796,926,26653,928,47300,929,58361,931,43071,933,60605,935,60605,937,41646,938,9820,939,9820,940,38959,962,64954,966,47300,990,97059,1e3,9820,1010,57979,1020,15348,58511,80796,250,71658,27370,0,14266,7651,89106,251,5497,47300,251,3697,44194,252,7651,18922,280,87026,5659,80129,59830,300,72389,85408,37998,278,83916,64147,65802,284,30506,95311,284,52050,44194,301,22433,47300,325,52396,10733,44588,7823,284,77380,41646,315,54009,57979,331,50073,53012,7651,43071,284,72389,23185,332,9103,6713,344,52050,93438,34933,97059
,284,29478,95645,345,54009,47300,346,71658,38232,84670,41646,284,71658,67404,347,84670,9820,356,22541,85445,9103,7823,284,82317,47300,357,84670,44194,363,563,53012,64153,64954,284,22541,6713,364,63531,58361,379,72583,56020,9103,41646,284,23098,26653,380,64147,6713,392,48530,56020,2219,32545,284,52396,64529,393,11765,74112,396,87026,8087,53596,40180,284,52396,67404,397,61752,15774,402,57404,7286,5497,58960,284,3697,57979,403,85728,58960,412,72583,8080,85728,40180,284,72389,9820,413,54009,78762,419,50073,52431,8414,62566,284,29478,62566,405,80129,80796,420,22745,28050,76204,97059,284,53955,15774,421,76204,9820,427,58752,87350,7651,67404,284,23098,44194,428,64277,41646,441,22541,85756,58511,64529,284,29478,43071,442,88198,95645,443,57404,11748,8414,95311,284,99362,58361,285,26396,7823,349,58511,97059,447,57404,57481,9820,352,84670,15774,450,74470,71093,9820,286,18087,47300,453,61091,22061,52821,53596,74112,284,52396,40180,454,54009,26653,456,93963,78309,76204,62566,284,71658,62566,457,63531,67404,460,58854,7286,76204,41646,463,29478,30506,4427,464,64153,97059,468,58752,34933,43021,469,82265,41646,470,23185,471,24361,30506,8184,472,81395,80796,470,23185,473,60637,63531,51516,474,31460,74112,470,58960,475,22061,9103,15795,476,11765,58361,480,58752,2219,44194,469,72583,29585,1,8414,4427,481,87764,57979,470,59830,482,71606,80129,79263,483,36422,62566,470,60605,484,22061,53596,8184,485,98945,60605,486,77,67170,51516,487,56027,58361,470,23185,488,61744,68343,65921,489,29172,78762,490,64954,491,96762,78762,492,80129,59830,569,58854,25022,80129,93083,570,53596,64954,570,29478,36696,86453,26653,571,54884,78762,465,47670,54884,22433,38959,464,23098,52058,2,88198,96735,572,68012,41646,10,6713,27,82077,62566,470,44194,573,24361,44588,79263,574,11451,9820,10,59830,119,85171,7823,470,47300,575,71093,19853,51516,576,29172,62566,10,7823,27,21585,43071,470,44194,577,82077,88198,75009,578,62733,26653,470,18922,579,16485,76204,65802,580,39561,9820,486,69374,72623,8184,581,90191,62566,470
,15774,582,96762,22433,28196,583,76029,41646,470,57979,584,61744,53596,25791,585,63903,62566,490,9820,586,71093,80796,492,84670,23185,703,61091,60637,30506,89542,704,72623,57979,704,3697,26972,10151,58361,571,32736,9820,705,46782,40640,11765,40180,464,30033,27370,2,19853,99730,706,13252,64954,470,43071,707,96762,80129,15308,708,83916,62566,490,78762,78,22061,6713,492,85728,23185,720,52050,35714,33414,8184,721,86453,18922,486,70041,2219,41837,722,63903,59830,470,95311,475,34264,76204,75559,723,67404,477,63903,78762,470,59830,478,71093,41646,95,58960,119,18695,34933,44194,479,82317,98239,4,58511,74976,724,87764,58960,470,46212,482,75467,53596,8184,725,51153,58361,470,41646,484,25022,64153,65802,726,13252,6713,490,47300,639,96225,64529,492,30506,58361,767,93963,60637,30506,96735,768,34228,58960,486,32736,64277,99730,769,93754,6713,486,98009,33414,99730,770,11451,40180,470,43071,475,85288,64147,15308,771,64954,477,78733,59830,470,26653,478,4486,64954,95,15774,119,25141,9103,80796,479,3697,940,4,44588,94555,772,76035,60605,470,9820,482,75467,61752,8184,773,87764,47300,470,26653,774,44605,85408,75559,775,81395,23185,490,23185,776,34264,80796,492,72623,6713,794,22541,4486,64153,15308,795,86453,78733,57979,796,64529,709,57481,7823,797,43071,621,4486,41646,798,36422,43071,799,15774,119,16188,59137,7823,800,80796,119,47,44605,25022,17775,62733,58361,796,64529,621,33352,78762,143,9820,491,82077,78762,798,90464,40180,799,60605,119,5619,24361,80796,713,59830,801,16485,41646,800,64529,27,16188,4486,96225,41646,802,88198,62566,572,77380,4486,83848,78733,6713,796,43071,288,96225,62566,143,59830,586,87847,60605,798,83916,32545,799,7823,119,95880,4486,44194,800,58960,119,5428,71093,25022,80796,802,64277,26653,706,30033,33352,8934,11451,38959,796,95645,803,96762,6713,143,59830,639,34264,78762,798,90464,58361,799,7823,119,5428,71093,78762,800,6713,119,44380,64879,22061,58960,802,67170,67404,768,10057,72563,42991,31460,47300,796,26653,804,96225,18922,143,9820,776,25022,44194,798,83815,5
8960,799,78762,119,83991,71093,40180,800,64529,119,30732,71606,96225,74112,802,7651,74112,795,48530,72563,43150,14266,46212,796,9820,805,59137,7823,802,30506,6713,721,87026,92215,58361,798,93283,58960,800,46212,119,91582,75467,57481,24937,53596,64189,806,81395,46212,807,7651,44194,808,82317,76020,0,99379,74112,809,84670,95645,806,22541,82077,44588,60605,810,10057,55207,1,85408,65921,289,85728,80796,811,58752,63531,38959,289,82317,58960,812,71807,63363,1,54009,15774,813,52980,58361,814,22745,74112,815,59830,816,64529,307,92686,78309,36422,15774,817,78762,30,71093,33414,9820,813,57404,23185,40,43193,63363,1,44588,94555,355,33414,38959,840,74470,44588,7823,0,52980,5879,0,58361,145,52980,90610,1,58511,95311,841,48530,72218,1,44588,26653,844,50073,72623,43071,355,43193,41646,845,87026,58361,846,22541,95645,847,10057,97127,2,22433,59830,876,87026,9103,78762,878,58752,18087,23185,355,22541,43071,845,71658,44194,877,87026,9820,847,58854,55207,2,13252,64529,470,58361,879,87847,72623,51516,880,29172,38959,470,41646,881,9206,63531,33043,882,56027,32545,490,60605,883,4486,64954,492,88198,40180,906,74470,85171,34933,93083,907,58511,97059,907,48530,67057,67057,74112,571,77548,47300,908,84051,98009,53596,47300,464,58752,29585,2,64147,37998,909,11451,67404,89,62733,26653,253,6713,95,24361,59830,515,93754,21585,62566,738,3933,96762,95311,754,35896,92215,64529,527,76035,32545,528,80796,119,44380,24361,58361,563,26653,119,95880,87847,26653,408,41646,910,34264,33352,32545,495,78762,30,57481,78762,730,58960,30,60637,57979,778,78762,30,44298,6713,536,13252,67404,408,90466,57979,911,224,26653,912,66595,43071,913,80570,47300,914,46792,18922,915,57126,74112,916,62205,24361,6713,917,58361,27,85171,41646,593,6713,95,44298,57979,537,64954,918,44605,6713,919,23185,27,71093,92215,9820,885,45787,87764,60605,887,41646,27,8578,99379,80796,143,95645,920,57481,6713,749,57979,491,44605,32736,59557,60605,887,6713,119,64650,33352,15774,143,40180,921,9206,78762,749,58960,776,35714,3410,14266,64529,887,64
954,119,16188,61744,59830,143,80796,922,85288,47300,749,62566,639,64879,80570,76035,64954,887,7823,119,6568,25022,64954,143,40180,923,75467,78762,749,57979,586,82077,24937,96762,44194,785,23185,30,34264,58361,538,76029,80796,594,32545,27,9206,62566,408,7823,30,33352,47300,593,26653,95,64879,71093,34264,57979,924,78733,60605,925,22433,44194,926,71658,92215,97059,927,64147,80796,928,52396,16485,95645,266,44588,7823,929,3697,82077,95645,930,64147,6713,931,52980,57481,38959,932,72623,60605,933,77380,72563,7823,934,80129,80796,935,10810,61744,62566,622,68343,38959,937,57404,85288,38959,601,53596,95645,938,52396,25022,95311,298,72623,95311,939,53955,72563,6713,759,63531,64954,940,23098,22061,59137,23185,941,56027,64954,496,85408,64529,962,30033,33352,80796,733,34933,47300,966,72389,85288,44194,967,34933,78762,990,3697,44605,41646,991,9103,40180,1e3,22541,85171,15774,945,2219,43071,1010,23098,96762,43071,1011,2219,9820,1020,29478,71606,82077,62566,1021,51153,44298,61752,74112,1022,23098,27118,1,30506,65802,1023,57979,1024,76204,60605,1023,48530,44588,59830,289,74470,2219,58361,909,52050,53596,58960,1025,74470,45100,1,9820,847,52396,7790,1,44194,847,57404,59324,1,74112,1026,77380,29585,1]},awcbb_yhh_fun1:{variablePool:{awcbb_yhh_fun2:"awcbb_yhh_fun2",exports:null,Op:null,hasOwn:null,defineProperty:null,awcbb_yhh_fun3:"awcbb_yhh_fun3",$Symbol:null,iteratorSymbol:null,asyncIteratorSymbol:null,toStringTagSymbol:null,define:"awcbb_yhh_fun4",err:null,awcbb_yhh_fun5:"awcbb_yhh_fun5",wrap:"awcbb_yhh_fun6",tryCatch:"awcbb_yhh_fun7",ContinueSentinel:null,Generator:"awcbb_yhh_fun8",GeneratorFunction:"awcbb_yhh_fun9",GeneratorFunctionPrototype:"awcbb_yhh_fun10",IteratorPrototype:null,awcbb_yhh_fun11:"awcbb_yhh_fun11",getProto:null,NativeIteratorPrototype:null,Gp:null,defineIteratorMethods:"awcbb_yhh_fun12",AsyncIterator:"awcbb_yhh_fun15",makeInvokeMethod:"awcbb_yhh_fun24",maybeInvokeDelegate:"awcbb_yhh_fun26",pushTryEntry:"awcbb_yhh_fun27",resetTryEntry:"awcbb_yhh_fun28",Context:"awcb
b_yhh_fun29",values:"awcbb_yhh_fun30",doneResult:"awcbb_yhh_fun32",awcbb_yhh_fun33:"awcbb_yhh_fun33",awcbb_yhh_fun34:"awcbb_yhh_fun34",awcbb_yhh_fun35:"awcbb_yhh_fun35",awcbb_yhh_fun36:"awcbb_yhh_fun36",awcbb_yhh_fun37:"awcbb_yhh_fun37",awcbb_yhh_fun39:"awcbb_yhh_fun39",awcbb_yhh_fun40:"awcbb_yhh_fun40",awcbb_yhh_fun41:"awcbb_yhh_fun41",awcbb_yhh_fun43:"awcbb_yhh_fun43",awcbb_yhh_fun44:"awcbb_yhh_fun44",awcbb_yhh_fun45:"awcbb_yhh_fun45",awcbb_yhh_fun47:"awcbb_yhh_fun47",awcbb_yhh_fun48:"awcbb_yhh_fun48",awcbb_yhh_fun49:"awcbb_yhh_fun49",awcbb_yhh_fun50:"awcbb_yhh_fun50",awcbb_yhh_fun51:"awcbb_yhh_fun51"},zhili:[32545,2,6713,13,58361,31,26653,33,46212,52,6713,71,67404,38,44194,137,57979,60,46212,55,43071,152,18922,151,60605,45,58361,98,18922,129,46212,173,80796,42,23185,58,57979,94,26653,144,78762,148,58960,150,74112,153,18922,158,95645,159,46212,162,23185,170,97059,177,58960,181,80796,190,9820,195,74112,196,78762,198,47300,202,60605,204,2158,89185,9103,7823,0,18087,47300,2,50073,5659,11451,67170,33043,1,2219,15774,3,71807,38959,4,52396,18087,94555,5,63531,43071,5,77380,43071,6,22745,44588,33043,7,8414,58361,3,3697,9820,8,50073,47229,4,63531,60605,13,58854,5497,33043,8,72623,95311,14,93963,24157,62566,15,80981,54335,3,68012,74241,4,34933,58960,14,10057,80129,96735,16,64277,58361,16,52396,59830,17,52396,25128,2,9820,18,44588,64189,19,88198,43071,16,22541,18922,20,58752,53,2,67404,21,85728,94555,22,5497,41646,16,77380,6713,23,29478,1853,2,58960,24,85728,93083,25,17133,10,13,1,82265,62566,30,76204,58960,31,77380,69977,2,45243,22433,95645,32,43932,44588,44194,31,22433,46212,33,71658,8080,67717,2504,67170,67404,1,87026,95311,52,72623,95645,52,72389,11748,11451,64153,94555,53,59557,8414,79263,54,64147,64954,54,77380,85728,38959,19,71658,30506,78762,55,22541,68343,7823,31,3697,59324,3,80129,32545,3,99362,46212,56,29478,22433,41837,57,63531,57979,57,23098,38269,19,86453,85408,59830,58,563,74677,1,76204,97059,57,71807,58767,1,18087,23185,57,30033,29585,1,63531,25791,59,85408,
58960,59,74470,7315,9,85408,74112,5,77380,85728,7823,59,10810,4886,2977,17,2219,64954,59,50073,53596,43071,19,58854,64277,59830,7,10810,43071,50,563,27118,2,25072,12,64147,74112,54,11765,95645,59,72583,80837,7651,60605,54,52980,64147,95645,60,52396,78762,4,67170,58960,38,50073,47300,4,22433,95311,54,52050,5497,26653,3,77380,15774,40,3697,27118,1,25655,53596,95645,38,52050,57979,4,10810,5659,63531,32545,60,87026,80796,4,93963,44588,64189,61,63531,58960,137,71658,80796,4,54009,43071,60,71807,38354,72623,26653,61,61091,32545,138,62733,57979,12,19853,15774,60,52050,71606,64529,28,26653,27,64650,44605,88198,58960,8,43193,31162,3,98228,11765,46212,60,10057,58960,138,29172,46212,12,64277,78762,137,87026,97453,43071,28,15774,27,5185,75467,68343,95311,8,58752,76020,3,45196,19853,59830,137,52980,60605,139,8414,67404,60,52050,64277,59830,25,82317,41646,137,85728,95311,31,48530,76020,3,52431,33414,74112,1,52050,67404,140,5497,23185,144,30033,10733,30506,95311,1,71658,78762,145,53596,43071,148,10810,25655,84670,44194,1,87026,58361,149,19853,64954,150,29478,93438,64153,9820,151,48530,60605,4,71658,33414,95645,152,563,29585,1,58306,34933,95311,151,71658,7823,4,52050,76204,58361,22,3697,67170,41646,153,71658,2219,47300,31,87026,90610,3,75284,76204,58960,1,53955,57979,151,64147,64529,151,10810,82315,54009,95311,1,52396,7823,154,8414,62566,158,10810,8087,9103,32545,61,563,54009,26653,152,61091,27370,1,48861,63531,43071,61,43193,68343,62566,25,22745,38959,38,85728,80796,31,48530,89391,3,14239,34933,43071,61,52050,9103,57979,19,53955,30506,6713,159,93963,61752,43071,31,52396,27118,3,72601,61752,80796,61,29478,26653,160,33414,40180,162,58854,53596,78762,31,29478,91090,3,94956,58511,6713,1,53955,9820,163,80129,46212,170,29478,93438,54009,26653,1,10057,62566,58,7651,67404,58,52050,80611,54009,41646,42,61091,95311,4,14266,74112,138,33414,62566,42,10057,44298,80796,130,64147,64529,177,87026,82077,7823,178,61752,23185,181,93963,60637,67404,101,19853,26653,190,30033,21585,40180,100,80129,5896
0,195,52980,22061,78762,194,88198,97059,196,71658,96762,40180,197,64147,43071,198,52980,57481,95645,199,7651,32545,202,10057,87847,64529,203,80129,64529,204,72583,82077,80611,22433,23185,1,29478,79180]},awcbb_yhh_fun2:{variablePool:{},zhili:[87017,33529,53596,97059,1,61091,45243]},awcbb_yhh_fun3:{variablePool:{obj:null,key:null,desc:null},zhili:[68328,58960,9,38959,10,95645,11,67320,85408,6713,9,82317,84670,57979,10,61091,67170,6713,11,10057,44194,12,52980,5659]},awcbb_yhh_fun4:{variablePool:{obj:null,key:null,value:null},zhili:[70676,78762,9,67404,10,23185,12,23282,34933,64954,9,52980,9103,7823,10,30033,83916,64954,12,54009,60605,12,30033,33352,46212,26,67404,27,76,59137,74112,28,44194,27,6568,57481,59830,29,60605,27,92021,85288,18087,43071,3,10810,57979,8,23098,5879,3,69301,7651,60605,9,30033,67170,95645,10,22745,77380,2504]},awcbb_yhh_fun5:{variablePool:{obj:null,key:null,value:null},zhili:[15348,23185,9,44194,10,43071,12,27863,64277,44194,9,22541,58511,97059,10,10057,80129,32545,12,77380,52821,93555]},awcbb_yhh_fun6:{variablePool:{innerFn:null,outerFn:null,self:null,tryLocsList:null,protoGenerator:null,generator:null,context:null},zhili:[64106,64954,34,7823,35,78762,36,97059,37,38549,9103,95645,35,58752,92588,12,84670,59830,38,52050,18087,47300,35,23098,9820,4,58752,48255,1701,6,7651,58960,38,10810,18564,4,85408,95645,35,43193,61752,56565,39,2219,62566,39,77380,74112,4,99362,64277,40180,3,99362,95311,40,71658,27370,1,9103,33043,41,34933,15774,37,30033,9409,1,39834,22433,15774,42,99362,44095,1,44588,15795,43,18087,32545,41,52980,95645,44,41709,62566,12,67170,58960,34,29478,64153,57979,36,29478,67170,95645,43,71658,19853,60605,45,53955,68185,3,99379,85408,15774,8,74470,76020,3,15276,33414,64954,41,71807,25284]},awcbb_yhh_fun7:{variablePool:{fn:null,obj:null,arg:null,err:null},zhili:[15348,64954,46,74112,9,62566,47,4004,32729,28,19,1,41709,59830,48,9820,49,57481,23185,47,54009,78762,9,58752,8414,95645,47,58854,33414,41646,46,43193,95645,50,82317,59324,2,97453,93555
,99221,64153,23185,32,39203,78733,80796,48,9820,51,61744,64954,47,88198,6713,32,72389,96225,45243,53031,45607]},awcbb_yhh_fun8:{variablePool:{},zhili:[15348,40365]},awcbb_yhh_fun9:{variablePool:{},zhili:[32517,67320]},awcbb_yhh_fun10:{variablePool:{},zhili:[26779,93730]},awcbb_yhh_fun11:{variablePool:{},zhili:[15348,72238,35985,48422]},awcbb_yhh_fun12:{variablePool:{prototype:null,awcbb_yhh_fun13:"awcbb_yhh_fun13"},zhili:[59830,64,60433,23185,4,26431,80129,46212,64,74470,48503,26653,65,40640,9820,51,57126,74112,66,83848,38959,67,74470,45100,1]},awcbb_yhh_fun13:{variablePool:{method:null,awcbb_yhh_fun14:"awcbb_yhh_fun14"},zhili:[80796,63,3749,26653,62,81501,7651,58361,4,58752,19853,23185,62,58854,7651,57979,63,71807,53596,60605,31,82317,97127,3]},awcbb_yhh_fun14:{variablePool:{arg:null},zhili:[15348,59830,47,96088,8414,9820,62,30033,84670,47300,47,23098,28567,80796,44,52396,5879,2,51512]},awcbb_yhh_fun15:{variablePool:{generator:null,PromiseImpl:null,invoke:"awcbb_yhh_fun16",previousPromise:null,awcbb_yhh_fun21:"awcbb_yhh_fun21"},zhili:[47300,79,6713,87,70676,58960,41,40180,68,48183,2375,58960,44,14266,74112,12,64277,64954,87,74470,22061,64153,41646,8,93963,91504,3]},awcbb_yhh_fun16:{variablePool:{method:null,arg:null,resolve:null,reject:null,record:null,result:null,value:null,awcbb_yhh_fun17:"awcbb_yhh_fun17",awcbb_yhh_fun18:"awcbb_yhh_fun18",awcbb_yhh_fun19:"awcbb_yhh_fun19",awcbb_yhh_fun20:"awcbb_yhh_fun20"},zhili:[58960,77,78762,80,47300,82,97059,83,60433,59830,62,40180,47,58361,69,46212,70,48183,88198,57979,41,30033,11765,44194,62,10810,58854,30506,38959,41,10057,33414,40180,47,99362,72623,38959,71,74470,63363,3,11765,78803,72,7651,26653,72,50073,9820,48,58854,46212,51,56585,9169,2,52189,111,8414,6713,72,29478,58361,47,10810,84670,51516,73,58511,46212,73,58752,9820,12,23098,68343,89106,12,64277,58361,12,58752,21450,8,88198,64529,12,53955,12483,40180,74,3330,33970,15,64153,46212,12,30033,95645,75,72623,18922,7,22745,95311,50,22541,91504,2,25219,28,64147,43071,77,
22541,68343,64954,80,99362,72623,41646,12,71807,22433,59830,68,3697,80796,69,77380,95012,1,46212,81,58752,27370,2,49852,29,64147,32545,82,22745,72623,62566,83,22541,61752,58361,12,82317,58361,75,22745,67170,59830,68,30033,67404,69,99362,27118,1,26653,81,99362,29362,2,79180,54009,97059,72,10057,67404,47,71658,9103,7823,70,57404,45100,1]},awcbb_yhh_fun17:{variablePool:{unwrapped:null},zhili:[68328,62566,76,70261,8414,43071,73,58752,58361,12,76204,46212,76,43193,27557,72623,78762,73,71658,68343,57979,69,23098,940,1]},awcbb_yhh_fun18:{variablePool:{error:null},zhili:[68328,6713,78,89185,18922,51,7651,57979,78,58854,63531,60605,69,52050,7651,23185,70,563,22433,58361,79,82317,95012,4,79180]},awcbb_yhh_fun19:{variablePool:{value:null},zhili:[33338,78762,12,54997,57979,65,8414,80796,12,30033,63531,67404,69,93963,22433,58960,70,43193,8414,23185,79,10057,69977,4]},awcbb_yhh_fun20:{variablePool:{err:null},zhili:[17105,23185,32,21418,64954,51,2219,97059,32,61091,30506,74112,69,82317,80129,78762,70,82317,76204,57979,79,10810,5879,4]},awcbb_yhh_fun21:{variablePool:{method:null,arg:null,callInvokeWithMethodAndArg:"awcbb_yhh_fun22"},zhili:[67404,86,80562,64529,62,41646,47,26569,85728,64529,85,19853,41646,85,10810,66558,8,53596,62566,86,93963,31162,0,37455,17,64277,6713,86,50073,53596,59830,86,23098,5497,62566,85,52396,32545,81,61091,58767,2,85445,88999]},awcbb_yhh_fun22:{variablePool:{awcbb_yhh_fun23:"awcbb_yhh_fun23"},zhili:[18922,84,70676,80124,18087,40180,84,563,19853,74112,68,3697,40403,1,21204]},awcbb_yhh_fun23:{variablePool:{resolve:null,reject:null},zhili:[60433,64954,69,80796,70,9832,8414,44194,62,22541,19853,23185,47,72583,9103,32545,69,43193,22433,6713,70,72389,76204,18922,79,50073,29585,4]},awcbb_yhh_fun24:{variablePool:{innerFn:null,self:null,context:null,state:null,awcbb_yhh_fun25:"awcbb_yhh_fun25"},zhili:[95645,106,5169,64954,34,59830,36,38959,43,86572,15774,88,8414,78803,89,5497,97059,106,23098,74054]},awcbb_yhh_fun25:{variablePool:{method:null,arg:null,delegate:null
,delegateResult:null,record:null},zhili:[68328,7823,62,95311,47,86572,19853,60605,89,3697,58361,90,24867,51849,2,86650,9,26653,91,67170,57979,92,71658,76385,1,48504,9103,46212,89,57404,62566,93,70757,24642,2,74241,23,11765,40180,62,61091,15774,51,98535,35828,2,55513,5,2219,97059,47,48530,49545,44588,7823,94,29478,29362,0,53031,7651,64954,43,563,64954,62,72623,40180,62,53955,25655,11765,95311,43,71807,44194,47,2219,67404,47,43193,66095,95311,95,97059,96,26420,335,53596,58361,43,23098,95311,97,77380,7651,96735,97,64147,44194,97,48530,24642,2,13780,45,67170,47300,97,48530,2219,41646,43,93963,30506,43071,98,93963,98239,2,64277,51516,99,64153,46212,99,10810,22695,2,44039,20,5497,43071,53,52050,22433,7823,99,71658,7275,35837,2,55098,2,29908,275,30506,78762,99,563,21204,54009,64529,43,29478,6713,62,22745,67404,65,37867,67772,87,85728,57979,43,563,9820,62,61091,74112,51,81616,9169,32,7651,41646,43,74470,23185,62,30033,59830,66,77007,38269,18,60605,66,63531,78762,43,58752,40180,47,93963,33414,26653,43,74470,97059,100,52980,68185,2,55098,41,64147,64954,89,43193,38959,88,62328,11383,2,55513,14,7651,38959,89,9820,93,25655,80129,78762,43,23098,6713,47,71658,62850,44588,95311,43,10810,58960,47,74470,18087,38959,43,23098,58960,101,52980,76020,1,29908,28,64147,95645,43,48530,67404,102,22433,47300,43,72583,64954,103,18087,47300,43,563,15774,47,43193,88857,72623,43071,43,50073,64954,103,82317,38354,68343,64954,89,59830,90,38232,68343,44194,34,29478,34933,40180,36,23098,8414,62566,43,10810,72623,95645,71,22541,95012,3,80129,94555,72,63531,95645,72,58752,47300,48,74470,58960,49,10045,9169,2,54324,59,8414,9820,89,18087,9820,43,52050,80796,104,77380,31824,4,6713,105,37455,2,40180,93,8080,61752,97059,53,30033,2219,15774,72,10057,57979,47,87026,24867,70299,2,74241,2,24153,65,90191,95645,12,30506,9820,72,52980,58960,47,43193,22061,44194,104,53596,97059,43,53955,32545,104,93963,16485,5377,22433,41646,72,22541,95311,48,48530,18922,51,10045,17054,31,33414,15774,89,80796,93,11748,2219,47300,43,
48530,74112,62,23185,51,56020,54009,26653,43,23098,32545,47,18087,32545,72,93963,47300,47,52980,78309,40180,95,47329,-339]},awcbb_yhh_fun26:{variablePool:{delegate:null,context:null,methodName:null,method:null,record:null,info:null},zhili:[66887,95645,97,95645,43,72115,64147,58361,43,74470,59830,62,58854,85408,4427,107,63531,60605,97,74470,58960,17,58854,7651,7823,107,61091,58752,67170,37998,62,61752,67404,62,71658,5497,23185,108,30033,74306,79810,2,60038,125,67170,46212,43,3697,47300,97,58361,95,4623,11765,9820,107,563,80796,51,74306,25595,10,85408,23185,97,82317,74112,17,563,64529,66,82317,92588,45,34933,58960,43,43193,59830,62,60605,66,80611,68343,6713,43,48530,44194,47,53596,18922,108,72583,88857,67170,58361,97,93963,9103,64529,43,93963,7651,58361,98,58854,95012,2,58306,8414,60605,43,563,97059,62,72389,95645,51,74306,47229,43,34933,67404,107,99362,64529,66,6794,92588,34,44588,46212,43,43193,18922,62,60605,51,7286,84670,46212,43,52980,60605,47,57979,109,34933,26653,107,82317,44194,110,44291,83813,5497,78762,111,563,40403,1,93438,95645,95,80129,80796,53,10057,38302,53596,58361,62,72389,19853,6713,97,52980,44194,17,57404,18087,58960,43,22541,80796,47,10057,5497,58361,71,43193,52058,3,18087,99730,72,30506,78762,72,82317,15774,48,43193,7823,51,61479,25979,2,13780,37,34933,43071,43,52396,60605,62,62566,51,87350,76204,74112,43,30033,40180,47,84670,41646,72,82317,43071,47,71658,11125,76204,57979,43,22745,64954,97,64529,95,85756,9103,64954,53,71807,99221,54009,32545,72,72583,26653,47,58854,85408,64189,112,61752,41646,112,563,53320,39,80129,26653,43,93963,23185,62,95645,51,78309,64277,59830,43,87026,7823,47,95645,113,76204,18922,111,93963,26012,1,50046,64277,57979,43,563,7823,97,6713,95,4623,64153,95311,53,52980,74241,95,9103,57979,112,30033,59830,104,52050,78395,6,34933,23185,112,43193,52878,80,34933,46212,43,72583,30506,64954,97,72583,23185,114,52980,64147,59830,112,74470,97059,12,74470,7286,44588,80796,43,10057,58960,65,63531,64529,97,72389,26653,115,52396,11125,34933,
95311,43,48530,62566,62,22745,62566,66,56876,33970,22,64147,57979,43,93963,38959,62,15774,65,10733,64147,43071,43,10057,67404,47,85728,64954,108,3697,38354,58361,95,19853,23185,43,29478,18922,97,58361,95,10733,85728,64529,53,10810,48422]},awcbb_yhh_fun27:{variablePool:{locs:null,entry:null},zhili:[60433,41646,116,89185,29172,9820,117,67170,60605,116,10057,15774,27,563,24361,44588,49304,118,53596,58960,116,58854,9820,119,89786,28741,21,80129,26653,118,52980,44194,120,33414,59830,116,53955,64954,119,29478,88857,34933,95645,118,52980,59830,120,87026,22433,6713,116,22541,67404,121,26210,21450,30,68343,97059,118,48530,47300,122,9103,97059,116,77380,6713,121,52050,52821,72623,59830,118,72389,26653,123,68343,74112,116,77380,58361,124,74470,50046,47300,95,9103,80796,118,10810,78993,78762,125,48530,18922,126,77380,68185,1]},awcbb_yhh_fun28:{variablePool:{entry:null,record:null},zhili:[78808,43071,118,41474,3615,85408,64529,118,30033,6713,127,52980,9409,1,90464,9103,78803,72,22433,7823,72,82317,78762,48,59830,49,85756,18087,74112,72,48530,7823,47,29052,41632,68343,15774,118,52396,23185,127,80129,60605,72,74470,56020,18922,95]},awcbb_yhh_fun29:{variablePool:{tryLocsList:null},zhili:[66932,80796,37,72115,74880,78762,125,99151,63903,59830,117,97059,128,33352,54884,85756,22433,18922,129,58752,74880,80129,59830,37,72583,62566,67,72583,58767,2,8902,58361,27,64965,39014,64954,130,71807,76020,1]},awcbb_yhh_fun30:{variablePool:{iterable:null,iteratorMethod:null,i:null,next:null,awcbb_yhh_fun31:"awcbb_yhh_fun31"},zhili:[97059,136,78808,26653,131,55173,61752,58960,131,22745,70299,2,20376,97,54009,74112,131,71658,76204,18922,19,52396,23098,8414,89542,132,68343,47300,132,99362,54335,2,55513,14,68343,97059,131,53955,84670,41646,132,22541,32545,50,563,27370,1,5377,80129,7823,131,22745,64954,65,10057,72186,59830,15,89595,24642,2,73062,5,67170,41646,131,52980,2029,5497,43071,131,52980,59830,133,22745,54009,64529,134,10057,98239,1,65991,96274,2,6341,25,40180,119,43209,9103,94555,135,63531,6256
6,136,61091,8414,74976,65,64153,58960,65,10810,74112,65,63531,59830,65,58854,30381,35812,63903,23185,65,85728,46212,94,87026,82077,10515]},awcbb_yhh_fun31:{variablePool:{},zhili:[3749,21418,61752,40180,131,72583,15774,133,58854,68343,78762,135,6713,119,68343,59830,135,52980,83270,98643,8414,47300,135,57404,79350,1295,54,9103,57979,131,563,67170,64529,135,74470,84670,57979,7,58854,41646,50,10810,72218,2,66558,2,55513,31,80129,47300,65,93963,78762,12,2219,7823,131,43193,84670,80796,135,563,10810,87350,63531,59830,65,72389,57979,104,38959,119,71268,8080,8414,47300,65,30033,79180,55513,-79,54009,64529,65,99362,78762,12,22433,44194,108,30033,27557,54009,57979,65,23098,97059,104,15774,27,67805,93438,85728,32545,65,58752,19567]},awcbb_yhh_fun32:{variablePool:{},zhili:[3306,71340,41709,95311,12,64153,58361,108,93963,99379,18922,104,59830,27,20757,96762,93777]},awcbb_yhh_fun33:{variablePool:{genFun:null,ctor:null},zhili:[15348,67404,141,98809,72623,62566,141,72389,29583,26653,15,85809,89484,7,2219,23185,141,48530,97059,138,563,85728,96735,142,63531,47300,142,61091,92021,12569,32275,30,8414,7823,137,57404,80129,43071,142,563,67096,91422,19,85728,26653,142,10810,58960,139,58752,43341,7,5497,38959,142,52980,59830,143,58854,59830,137,30449,79180]},awcbb_yhh_fun34:{variablePool:{genFun:null},zhili:[26779,32545,141,72115,80129,15774,3,10810,15774,146,58752,25979,29,63531,58361,141,52050,41646,147,67170,9820,60,10810,52431,44588,23185,141,72583,64277,43071,25,50073,64529,137,22433,95645,31,82317,31787,3,29908,17,7651,23185,141,22745,72623,60605,60,87026,44588,78762,3,58854,32545,146,563,97127,2,64277,57979,141,53955,58960,4,63531,23185,61,22541,84670,95311,3,72389,95645,40,30033,41411,1,88857,11765,58361,141,72583,10515]},awcbb_yhh_fun35:{variablePool:{arg:null},zhili:[70676,43071,47,72238,56027,15774,75,9103,41646,47,82317,92215,41373]},awcbb_yhh_fun36:{variablePool:{},zhili:[2158,71340,27212,70130]},awcbb_yhh_fun37:{variablePool:{innerFn:null,outerFn:null,self:null,tryLocsList:nu
ll,PromiseImpl:null,iter:null,awcbb_yhh_fun38:"awcbb_yhh_fun38"},zhili:[7823,157,55451,40180,34,47300,35,74112,36,32545,37,62566,68,4004,72623,78762,68,23098,80796,27,45958,62328,28741,12,64147,26653,68,64277,15774,155,57404,80837,85408,64529,68,43193,68343,23185,34,72583,85408,64954,35,48530,2219,32545,36,10810,53596,18922,37,3697,85408,95645,52,71658,58767,4,58511,80796,68,58854,64153,46212,151,50073,42106,2,18087,29242,156,61752,40180,35,93963,2219,97059,1,10810,43071,140,22541,47409,1,79810,20,63531,9820,157,22745,61752,32545,156,52050,23185,65,82317,31162,0,95311,81,22745,91090,1,44039,4,64147,38959,156,29478,66353]},awcbb_yhh_fun38:{variablePool:{result:null},zhili:[78808,15774,73,81501,85728,95311,73,52050,18922,104,72583,51849,11,76204,18922,156,53955,15774,65,72583,95012,0,52189,7,44588,62566,73,22541,43071,12,72389,25284]},awcbb_yhh_fun39:{variablePool:{},zhili:[33338,54997,28567,10515]},awcbb_yhh_fun40:{variablePool:{},zhili:[26779,9832,78762,161,70130]},awcbb_yhh_fun41:{variablePool:{val:null,object:null,keys:null,key:null,awcbb_yhh_fun42:"awcbb_yhh_fun42"},zhili:[64954,169,4062,64529,164,4004,88198,23185,164,87026,80129,74112,3,52980,89391,1,85728,75009,74,52861,33414,78803,163,64277,18922,74,10810,11832,62566,27,84670,74976,165,58511,44194,165,72389,2219,43071,166,53955,26653,133,50073,39404,82506,31,5497,9820,10,84670,95645,166,563,72623,97059,165,563,99362,2048,19853,78762,10,10057,67170,67404,163,43193,9820,126,72389,29362,1,30506,9953,165,73062,-45,61752,41646,163,10057,9820,167,43193,76020,0,70312,61752,78762,169,53955,2029]},awcbb_yhh_fun42:{variablePool:{key:null},zhili:[66932,84405,5497,64954,163,48530,58960,133,3697,55571,53,76204,95311,163,52980,18922,168,71658,45100,0,30506,37998,10,19853,58361,74,93963,53596,47300,10,48530,19210,50472,2,52878,26,58511,32545,65,71807,40180,12,53596,32545,10,10057,80837,53596,40180,65,23098,18922,104,64954,119,91582,7286,64147,7823,65,50073,74054,63144,-62,54009,67404,65,29478,15774,104,41646,27,5185,8080,221
9,6713,65,3697,21204]},awcbb_yhh_fun43:{variablePool:{skipTempReset:null,name:null},zhili:[83875,95311,171,38549,43390,47300,172,38959,27,80837,20640,9820,65,7823,27,80837,74880,62566,102,28567,78762,103,72623,57979,108,71807,28050,55567,80796,103,43193,10733,28567,47300,104,23185,119,5619,50046,91410,15774,97,43071,95,25655,56456,74112,62,78762,65,66095,2375,74112,47,33414,47300,108,74470,11748,18087,44194,173,93963,20640,46212,125,50073,97059,67,22745,7790,1,15276,64277,62566,171,50073,56369,63012,2,37455,107,56456,83031,46212,27,30506,89542,165,61752,80796,165,61091,7651,41646,166,563,97059,133,57404,52836,4036,86,54009,60605,143,88198,15774,166,22541,11765,7823,165,52980,74470,57808,47300,27,2219,62566,143,61091,15774,174,72389,7790,1,23185,175,33783,45295,14,70265,18087,97059,143,72583,61752,97059,7,50073,15774,50,3697,5879,2,80958,18,32545,119,9103,64954,143,22745,15774,176,58854,41411,1,88198,57979,134,23098,5879,1,5347,39164,16,27212,80129,57979,143,23098,11765,46212,108,58752,82315,56456,33414,38959,143,61091,43193,8414,14138,165,45016,-100]},awcbb_yhh_fun44:{variablePool:{rootRecord:null},zhili:[32517,84405,56456,95311,104,43071,27,5619,53012,4592,64954,125,58752,64529,27,58752,7823,127,58854,76204,49304,179,2219,46212,179,22541,95645,48,53955,7823,51,44575,53320,2,55513,8,68343,26653,179,23098,60605,47,58752,90866,237,97059,180,52050,93777]},awcbb_yhh_fun45:{variablePool:{exception:null,context:null,handle:"awcbb_yhh_fun46",i:null,entry:null,record:null,hasCatch:null,hasFinally:null},zhili:[60605,186,55451,32545,182,41474,24544,74112,104,58854,50472,2,55513,5,54009,74112,182,72583,47762,39014,64277,65802,43,80796,119,91410,57979,125,52980,46212,133,71807,17262,64277,29242,135,18922,27,2219,58361,135,22745,84210,67541,276,39014,15774,125,43193,85728,18922,135,52980,29478,54009,43021,118,64153,44194,118,93963,15774,127,563,64153,99730,72,80129,62566,118,22541,32545,117,50073,59830,128,81616,67772,2,52189,9,41646,185,80129,9820,186,10810,31162,1,70130,27212,
32545,172,22541,11765,40180,118,93963,26653,117,77380,47136,96274,2,73062,202,30506,15774,118,72389,41646,120,64277,62566,7,71807,26653,50,52050,940,2,72623,89106,187,64277,26653,118,99362,6713,122,2219,6713,7,29478,32545,50,53955,69977,2,11765,75421,188,63531,47300,187,23098,66706,4,18087,6713,188,22745,50472,91,18087,41646,187,53955,24642,50,54009,9820,188,50073,5347,70299,2,34299,9,32545,189,80129,32545,92,22541,46283,1,49545,88198,58960,118,52980,47300,122,22745,70265,46212,172,61091,34987,68180,2,63144,14,34933,95311,118,99362,41646,122,43193,58511,78762,186,61091,58767,1,27484,45016,33,8414,23185,118,29478,64954,120,58854,73741,46212,172,29478,57293,63012,2,55098,17,58511,62566,118,82317,41646,120,57404,43071,27,77519,30506,6713,186,53955,31162,2,74054,74241,63,5497,40180,118,22745,64529,120,48530,42083,38959,172,52396,34987,96274,2,54324,17,85408,18922,118,58854,44194,120,10810,44194,27,76,22433,23185,186,53955,31162,2,45243,2219,64529,118,3697,46212,122,71807,13261,67404,172,10810,40670,54335,2,24153,14,11765,60605,118,10810,62566,122,93963,8414,64954,186,74470,58767,1,5377,34933,41646,135,26653,119,33414,47300,135,52396,20246,10414,67806,-285]},awcbb_yhh_fun46:{variablePool:{loc:null,caught:null},zhili:[26472,64954,183,97059,184,71340,72623,44194,72,53955,47300,48,78762,51,11748,72623,40180,72,52980,26653,47,53596,6713,182,22541,8087,58511,80796,43,72389,40180,65,2219,80796,183,3697,52821,68343,32545,184,50073,32275,22,64147,64529,43,10057,80796,62,58960,65,38354,88198,64954,43,74470,78762,47,7651,38959,108,57404,38232,23185,95,64277,46212,184,99362,12569,56369,10515]},awcbb_yhh_fun47:{variablePool:{type:null,arg:null,i:null,entry:null,finallyEntry:null,record:null},zhili:[55451,41646,48,78762,47,27863,60605,119,70265,46212,125,563,47300,133,58752,62226,76204,56565,135,78762,27,58511,95645,135,52980,44509,59040,81,27212,43071,125,77380,22433,97059,135,10810,52396,2219,4427,118,27212,7823,172,77380,22433,64529,118,87026,74112,117,50073,66511,66706,15,54009,6
4529,118,563,23185,122,44588,40180,7,29478,15774,50,74470,7790,2,38269,12,53596,9820,118,53955,6713,122,52396,73741,62566,172,82317,35163,31824,2,55513,9,64277,58361,118,22745,84670,75421,191,24153,13,76204,9820,135,78762,119,8414,74112,135,52980,55461,20350,76073,-90,2219,7823,191,71807,69685,16,72623,41646,48,77380,95645,192,77007,40335,7,30506,78762,48,22541,97059,193,7275,2858,12,63531,6713,47,22745,11765,40180,191,22745,95645,117,72389,73580,27998,12,84670,46212,191,22745,41646,122,52396,11765,43071,47,43193,99781,89484,10,67170,67404,191,59830,95,93438,18087,58960,191,58854,88198,40180,191,52980,66558,3,93283,45016,7,68343,60605,191,10057,78762,127,58752,61752,37998,72,64277,59830,72,58854,32545,48,80129,26653,48,58854,38232,7651,15774,72,29478,32545,47,88198,9820,47,30033,11748,85408,58361,191,74470,50472,12,22433,97059,72,10057,27212,95311,194,58854,91090,1,64476,21,69624,64529,62,32545,65,52821,466,41646,65,18087,67404,191,22541,23185,122,99362,5659,61752,40180,53,72389,78762,95,45243]},awcbb_yhh_fun48:{variablePool:{record:null,afterLoc:null},zhili:[87017,95311,72,78762,123,27863,76204,60605,72,53955,58960,48,72389,64954,51,62328,47680,2,64476,8,19853,6713,72,29478,47300,47,93963,66647,7651,67404,72,87026,97059,48,22745,74112,192,7304,85948,10,85728,74112,72,53955,59830,48,29478,43071,193,81616,70299,79,5497,23185,72,30033,7823,48,3697,95311,66,55708,54335,32,80129,95311,72,77380,95645,48,72583,62566,49,44575,45295,4,64277,67404,123,53955,32275,12,24544,18922,65,88198,58361,123,3697,56020,73741,47300,65,22745,47329,33,93637,64529,180,31779,41646,47,63531,41646,72,29478,58361,47,48530,38232,42083,58960,47,52050,88857,42083,64954,62,64529,66,38354,93637,32545,65,7823,185,11125,64529,95,99693,11,70265,38959,65,84670,74112,72,29478,62566,47,52980,82315,54009,26653,53,10810,51512]},awcbb_yhh_fun49:{variablePool:{finallyLoc:null,i:null,entry:null},zhili:[60433,78762,122,40365,67404,119,31779,58960,125,48530,41646,133,22745,81348,34933,75559,135,60605,27,76204,67
404,135,74470,75313,32528,78,466,80796,125,71807,2219,64529,135,22745,99362,76204,93083,118,72623,97059,122,29478,68343,40180,118,72583,38959,122,72583,93875,53320,2,86650,37,18087,44194,118,74470,32545,127,71807,85408,23185,118,87026,62566,123,74470,62834,23185,194,71658,97127,2,88698,85408,15774,118,71658,68343,41646,173,48530,31162,1,30238,64277,97059,53,72389,48422,72623,74112,135,74112,119,22433,6713,135,53955,96776,44288,55098,-87]},awcbb_yhh_fun50:{variablePool:{tryLoc:null,i:null,entry:null,record:null,thrown:null},zhili:[15348,40180,117,33529,44194,119,42083,58361,125,87026,95645,133,29478,20246,64147,75559,135,59830,27,33414,60605,135,3697,69692,66627,90,56456,40180,125,10057,22433,57979,135,72583,99362,9103,43021,118,19853,26653,117,23098,58511,47300,118,52050,58960,117,50073,95683,79810,2,79038,49,18087,9820,118,77380,7823,127,87026,84670,64189,72,85728,95311,72,53955,9820,48,30033,43071,51,98535,73109,2,18564,20,19853,7823,72,43193,58361,47,99362,7651,15795,200,19853,95645,118,77380,85728,58361,173,10810,58767,1,76204,7823,200,93963,5377,64153,60605,135,44194,119,18087,58960,135,99362,38460,55395,55513,-99,40180,201,34933,60605,92,53955,46283,1,47343]},awcbb_yhh_fun51:{variablePool:{iterable:null,resultName:null,nextLoc:null},zhili:[66887,74112,131,18922,114,58361,115,34962,31779,6713,97,63903,46212,17,33414,43071,131,71658,7651,15774,58,563,98239,1,71606,80796,114,54009,7823,114,93963,99379,80796,115,61752,95645,115,10057,44298,52431,4592,18922,62,52050,62566,65,79868,25595,12,93637,57979,47,11765,74112,108,563,87350,73741,97059,47,10810,19853,38959,53,23098,93555]},awcbb_yhh_fun52:{variablePool:{gen:null,resolve:null,reject:null,_next:null,_throw:null,key:null,arg:null,info:null,value:null,error:null},zhili:[78808,60605,205,38959,69,78762,70,58361,206,18922,207,60605,10,58960,47,96088,36784,29,16,1,7651,78762,47,82317,61752,80796,205,52396,33414,80796,10,61091,50073,29585,1,58511,79263,112,85728,47300,112,22541,38959,12,82317,76204,79263,12,35812,6427
7,58960,78,31354,9103,62566,78,52980,30506,38959,70,22745,27118,1,27484,21204,88999,64277,7823,112,52396,62566,104,10057,63012,28,8414,95645,206,99362,53596,32545,207,77380,76204,95311,12,74470,85728,6713,155,61091,7823,69,3697,29362,1,7823,81,58752,98239,2,60763,10,22433,67404,12,43193,84670,32545,69,50073,29362,1]},awcbb_yhh_fun53:{variablePool:{fn:null,awcbb_yhh_fun54:"awcbb_yhh_fun54"},zhili:[58361,213,26472,62566,46,38549,8414,80796,213,99362,10515]},awcbb_yhh_fun54:{variablePool:{self:null,args:null,awcbb_yhh_fun55:"awcbb_yhh_fun55"},zhili:[64529,212,66932,27863,237,85728,29242,36,67170,64529,208,77380,18087,4427,209,58511,43071,212,563,72623,95311,155,72583,40403,1,74054]},awcbb_yhh_fun55:{variablePool:{resolve:null,reject:null,gen:null,_next:"awcbb_yhh_fun56",_throw:"awcbb_yhh_fun57"},zhili:[67404,206,78762,207,39441,26653,69,43071,70,26431,85408,6713,36,58854,30506,78762,209,99362,68343,57979,46,23098,59830,210,10057,52058,2,11765,56565,205,30506,38959,108,53955,67170,38959,206,58752,29585,1]},awcbb_yhh_fun56:{variablePool:{value:null},zhili:[32517,67404,12,55173,88198,15774,205,22541,2219,38959,69,58854,72623,9820,70,53955,68343,80796,206,22541,80129,47300,207,53955,64954,65,54009,46212,12,53955,33414,74112,211,22745,5879,7]},awcbb_yhh_fun57:{variablePool:{err:null},zhili:[2158,62566,32,27863,18087,44194,205,71807,64277,15774,69,52050,76204,9820,70,52980,11765,59830,206,22745,8414,47300,207,22541,97059,51,54009,18922,32,72389,2219,59830,211,22541,45100,7]},awcbb_yhh_fun58:{variablePool:{e:null,a:null,cbbiyhh:null,awcbb_yhh_fun59:"awcbb_yhh_fun59",n:"awcbb_yhh_fun60",o:"awcbb_yhh_fun61"},zhili:[60605,233,59830,247,58960,225,4062,27863,59830,214,64277,57979,215,50073,57979,216,563,47409,1,64954,217,48530,64277,56565,218,63531,44194,218,57404,2977,7,53596,46212,218,99362,7823,219,87026,21105,11,95311,220,88198,38959,218,71807,67404,219,72583,940,1,25979,2,20376,1,35812,95645,27,68343,8184,221,26653,222,64277,95311,215,72389,18922,223,87026,63363,1,74112,133,2
2541,64147,46212,221,71658,8341,32528,43,34933,23185,224,43071,222,44588,9820,215,3697,47300,223,10057,89391,1,54009,95645,221,58752,52050,30381,53596,64954,224,3697,85728,95645,225,50073,58767,1,76204,80796,221,18922,119,30506,95311,221,52980,44291,55019,64476,-64,44588,95311,215,82317,76029,95311,226,95645,27,20757,24361,97059,227,78762,27,65991,71606,5497,60605,233,72583,58511,26653,234,50073,7969,1,23185,235,22541,89391,2]},awcbb_yhh_fun59:{variablePool:{a:null,i:null,cbbiyhh:null,d:null},zhili:[65870,44194,224,48183,64529,27,34933,37998,221,85728,62566,224,22541,62566,133,71807,2219,46212,221,61091,56822,94160,125,80129,41646,135,33414,78762,224,77380,5497,6713,221,74470,71807,82315,60605,226,64153,59830,135,22745,59830,48,82317,79868,45162,2,24153,85,62566,27,85408,33043,221,64153,46212,135,77380,38959,228,72583,59830,133,52050,5497,9820,221,77380,1908,94160,63,22433,15774,229,54009,95645,135,30033,57979,228,48530,5497,23185,221,22745,29478,50046,64954,230,85728,58960,229,72583,62566,231,72583,30449,21450,10,64529,220,5497,62566,229,61091,60605,232,61091,52038,98635,10,64277,47300,229,77380,88198,95645,225,53955,2732,1,11765,78762,221,15774,119,85728,7823,221,10810,18135,79248,55513,-80,67170,26653,221,26653,119,72623,43071,221,52980,13263,10414,99693,-139]},awcbb_yhh_fun60:{variablePool:{a:null,i:null},zhili:[65870,15774,224,50825,82265,34933,28196,135,34933,80796,224,10057,58960,236,61091,98635,21,44588,57979,135,99362,74112,236,64277,95311,224,53955,41646,236,74470,66095,64153,38959,135,77380,95645,236,87026,11765,40180,224,74470,64954,237,99362,32275,21,64153,32545,135,50073,62566,238,63531,58361,224,57404,41646,237,563,5659,33414,64954,135,72583,47300,238,10057,46212,239,80129,46212,224,10057,58361,240,50073,37867,70299,34,41646,241,85408,60605,224,74470,7823,240,10810,93875,88345,11,67170,18922,135,48530,95645,242,95645,243,50046,60763,9,34933,74112,135,72583,15774,242,40180,244,8087,55513,9,19853,59830,135,22745,57979,242,60605,245,27557,58511,97059,135
,53955,51512]},awcbb_yhh_fun61:{variablePool:{a:null,i:null},zhili:[33338,23185,224,4004,33414,9820,224,52050,95645,246,22541,54335,2,24153,1,25284,64153,43071,224,57404,41646,246,23185,27,67805,52821,64153,62566,224,3697,80129,74112,247,22541,72218,1,67170,74976,135,58511,26653,224,61091,59830,248,57404,54009,23185,135,22541,5497,6713,249,50073,52058,2]},awcbb_yhh_fun62:{variablePool:{t:null,e:null,n:null,o:null,a:null,awcbb_yhh_fun63:"awcbb_yhh_fun63",awcbb_yhh_fun64:"awcbb_yhh_fun64",awcbb_yhh_fun65:"awcbb_yhh_fun65"},zhili:[44194,268,60605,272,18922,279,78808,6713,175,23185,218,44194,247,26569,34933,80796,175,53955,58361,89,563,64529,253,71658,25072,13,63531,64954,175,77380,46212,89,74470,95645,253,52980,9820,254,52396,5347,21105,4,30506,67404,218,50073,75288,4,18087,67404,247,29478,53320,2,37455,116,89873,62566,255,34933,26653,218,72389,82077,74112,256,58960,119,67805,22061,97059,257,15774,258,57481,9820,259,15774,260,87847,59830,261,95645,262,75467,80129,8158,225,95645,263,7651,23185,225,22541,8414,44194,264,22541,32545,1,77380,18922,265,10057,69977,2,33414,99730,224,7823,266,64147,57979,224,43193,58511,58361,175,99362,67404,267,58752,72218,2,33457,74112,78,33414,58960,268,71658,68343,74112,224,57404,64529,269,52050,90610,2,48861,32545,265,44588,60605,272,52980,44588,23185,224,52050,9820,269,99362,47409,2,7100,62566,273,53596,95645,279,58752,5497,23185,224,53955,97059,269,61091,74677,2]},awcbb_yhh_fun63:{variablePool:{i:null},zhili:[3749,7823,135,27863,61752,97059,224,74470,43071,185,3697,91090,0]},awcbb_yhh_fun64:{variablePool:{},zhili:[87017,72115,34933,67404,247,29478,81395,7823,270,26653,27,87847,68343,58960,224,29478,67404,271,58752,91090,2]},awcbb_yhh_fun65:{variablePool:{i:null,d:null,f:null,p:null},zhili:[32517,7823,135,60605,229,26653,274,54997,64147,23185,229,93963,53596,40180,275,87026,58960,276,48530,27118,1,72623,43021,277,76204,15774,277,77380,64277,57979,175,29478,7651,58361,278,53955,95012,2]},awcbb_yhh_fun66:{variablePool:{t:null,e:null},zhili
:[68149,7823,175,9820,218,48183,62566,281,7651,78762,175,71807,64954,282,43193,7275,25219,2,37455,42,60605,283,64153,95645,284,82317,38959,285,71658,57979,286,22541,91504,1,58306,6713,287,63531,26653,284,22745,32545,285,87026,46212,286,99362,76020,1,63019,59830,288,80129,57979,289,30033,74112,126,77380,27118,1,38302,64147,18922,175,52396,95311,290,23046,25072,7,63531,67404,175,563,44194,290,71658,28741,10,58361,291,80129,58361,175,82317,18922,282,22541,74306,75288,23,9103,41646,175,563,7823,290,72583,7651,47300,175,74470,15774,292,53955,64147,15774,218,52396,58361,293,563,76020,2,64153,6713,175,10057,7823,294,89786,33970,7,33414,15774,175,71658,7823,294,3697,1910,69,46212,291,30506,38959,175,82317,18922,282,71658,37867,9169,41,93283,95645,273,8414,6713,175,71807,58361,294,72389,33352,6713,295,60605,296,33352,60605,48,26653,78,25022,67170,67404,297,72583,31162,1,62221,95645,298,19853,64529,218,29478,23185,267,22541,74677,1,55513,16,67170,32545,175,52396,58960,294,74470,53596,58361,297,99362,58960,299,30033,72218,1,95645,95]},awcbb_yhh_fun67:{variablePool:{t:null,e:null,n:null,o:null,a:null},zhili:[68149,80796,175,78762,218,70261,65470,34933,96735,247,98945,71527,17775,35896,66595,18087,33043,225,30506,95645,175,50073,33414,59830,302,563,40403,1,41646,303,10810,63363,0,58511,95645,302,563,1390,0,97059,303,77380,58767,0,44226,30506,75559,224,74112,304,74112,305,67404,306,15774,306,95645,307,51683,4067,10155,19398,54009,6713,224,72389,85089,1701,128,47300,305,64529,306,32545,306,74112,307,40894,56089,3511,30506,97059,224,10810,9300,50472,86,41646,306,9820,306,40180,307,10155,96110,58511,59830,224,72389,30744,68180,50,46212,124,18922,306,32545,307,89858,85876,5497,57979,224,52050,30744,73069,17,67404,27,7651,38959,224,71658,79350,73109,4,62566,308,34299,2,57979,309,13780,16,32545,310,44194,27,44194,306,58361,307,64277,57979,224,52050,56838,47020,68862,26909,67806,19,46212,311,57979,27,9820,306,15774,306,80796,307,64153,44194,224,58854,53903,15514,92919,61159,79230,24153,
22,95645,312,59830,27,58361,305,64529,306,41646,306,7823,307,19853,95311,224,50073,36270,78323,36270,68683,51001,44291,49852,227,8414,57979,224,88198,23185,175,52396,67170,32545,302,50073,90916,1,11125,58511,95645,225,52396,9820,27,22745,58960,27,18087,95311,224,48530,23185,313,61091,63363,0,38959,314,53596,43071,247,72389,97059,315,22745,91504,2,7286,11765,23185,225,61091,9820,27,563,64954,119,43071,119,19853,59830,224,61091,97059,316,52396,5879,0,94863,8414,59830,247,30033,74112,315,52980,55207,1,7286,19853,95311,225,72389,38959,27,87026,74112,121,8414,80796,224,22745,15774,317,10057,29362,0,72623,38959,247,52396,62566,315,87026,69977,1,52431,72623,78762,218,52050,78800,86,18087,23185,225,30033,46212,119,10057,40180,27,11765,74112,224,93963,41646,318,23098,90610,0,44588,58960,247,99362,67404,315,52050,7790,1,30381,72623,67404,225,50073,62566,119,72389,95645,119,63531,46212,224,10810,64529,319,77380,59324,0,9103,38959,247,52980,74112,315,61091,72218,1,52431,64153,95645,225,52396,62566,119,23098,58361,121,18087,67404,224,71807,6713,320,99362,97127,0,64277,95311,247,10057,40180,315,10810,72218,1,93438,40180,95,74112,321,9103,44194,225,10810,47300,119,82317,6713,322,93963,29585,1,40180,323,7823,324,58511,74112,225,3697,38959,27,71807,15774,322,10057,69977,1,19919,79230,79180]},awcbb_yhh_fun68:{variablePool:{t:null,e:null,n:null,o:null},zhili:[33338,95645,175,15774,218,93730,57979,30,58511,96735,247,63531,9820,175,19853,41646,175,72389,9103,38959,326,58752,90610,1,30381,72623,97059,218,61752,32545,218,30033,9409,2,38959,121,66095,38959,95,76204,78762,175,53955,95311,133,22541,64153,89106,225,11765,15774,218,71807,68343,95645,225,22541,35163,15392,24,33414,95311,247,47300,327,54009,95645,247,22541,51599,71121,64153,26653,225,64529,119,33414,59830,225,71807,68644,47391,86650,-35,64529,328,88198,23185,218,93963,44588,62566,329,58854,47300,330,99362,68185,2,80129,43071,175,52980,85174,11078,6,64147,18922,175,48530,6341,12,26653,27,9103,32545,175,72583,88054,8414,95311,247,
77380,9307,27484]},awcbb_yhh_fun69:{variablePool:{t:null,e:null},zhili:[2158,58960,175,80124,59830,27,33414,64954,175,93963,59,9169,2,77765,6,40180,30,60605,27,91542,93777,38959,30,85408,59830,175,52396,1014,84670,79263,218,7651,97059,218,88197,333,334,40180,30,88198,78762,218,77380,78762,335,563,55207,2,27781,7651,57979,218,39365,336,334,59830,337,30506,78762,218,563,64529,335,93963,90610,2,28050,33414,44194,218,40180,338,64529,337,69648,339,334,62566,30,62566,337,57979,338,34933,47300,218,58752,67404,335,57404,31787,2,9820,335,10057,27118,2,6713,335,77380,7790,2,28050,72623,97059,218,35662,340,30,95311,341,11765,46212,218,71807,58960,335,50073,89391,2,27557,60605,27,26653,337,67170,43071,218,23098,60605,342,58752,41411,1,18864,33970,7,44194,30,11765,40180,218,52050,85353,50725,18,44588,9820,218,54009,59830,218,99362,18087,7823,343,74470,69977,1,30381,85728,95311,218,58752,18087,74112,218,99362,70130]},awcbb_yhh_fun70:{variablePool:{t:null,e:null,n:null},zhili:[66932,64954,175,38549,2219,95645,175,48530,7651,60605,302,71807,76385,1,22433,64189,218,9103,46212,218,93963,58511,32545,302,22541,41646,276,87026,98239,1,34933,65802,247,46212,307,44588,78762,247,58854,15514,82306]},awcbb_yhh_fun71:{variablePool:{t:null,e:null,n:null,o:null,awcbb_yhh_fun72:"awcbb_yhh_fun72"},zhili:[64529,353,36174,58361,175,98809,43071,348,67170,9820,284,22745,74112,285,22745,74112,349,52980,27370,1,54009,41837,218,72623,64529,218,53955,44380,15764,43,85728,95645,218,52980,44194,350,13263,5497,25791,247,61752,6713,218,87026,9820,351,43314,64277,79263,225,8414,44194,175,10057,54009,64529,225,93963,76204,41646,247,58752,30506,58361,251,58854,9820,252,53955,29362,3,13780,20,61752,32545,353,52396,95645,354,54009,97059,355,22745,32545,349,52050,74677,1,32545,81,22541,68185,1]},awcbb_yhh_fun72:{variablePool:{n:null,o:null,a:null},zhili:[60433,97059,247,54997,46212,291,7651,58960,247,30033,64954,282,58752,86214,73069,2,55513,77,9820,348,34933,58960,247,72389,6713,73,30033,58960,348,74470,68343,589
60,284,72583,95311,285,52050,64529,352,50073,90610,2,80129,18922,247,93963,97059,73,3697,47300,348,10057,23185,350,18135,44588,33043,225,84670,40180,247,48530,78762,73,23098,40180,348,58854,74112,351,13263,44588,89542,224,64277,32545,175,22745,63531,26653,224,71807,34933,95311,225,563,61752,7823,251,87026,46212,252,58854,2732,3]},awcbb_yhh_fun73:{variablePool:{t:null,e:null,n:null,o:null,a:null},zhili:[43254,60605,175,98809,85408,18922,175,563,39076,31824,2,45016,3,64529,358,45607,68343,95311,175,82317,84670,15774,359,57404,91090,1,84670,8184,218,57979,27,80129,89106,247,32545,27,5497,15308,225,58361,306,61752,67404,218,48530,33426,2977,81,2219,57979,247,57979,306,18087,7823,218,10057,63186,18087,95645,359,52396,58767,1,52431,8414,15774,218,44194,306,88198,59830,218,74470,21934,7651,43071,359,74470,72218,1,93438,9820,306,9103,40180,247,52980,74250,21450,36,19853,23185,225,43071,306,58511,64954,247,77380,36842,8414,57979,359,72583,76020,1,11748,5497,23185,247,23185,306,44588,41646,247,74470,2020,72623,74112,359,50073,27370,1,80611,32545,95,18922,95,41646,360,53596,58960,218,99362,88198,9820,359,82317,27118,1,74112,30,38437,43314,72623,75421,224,7823,27,33414,59830,247,30033,83614,50725,29,85408,67404,224,2219,58361,224,43193,18922,361,33414,38959,247,29478,11765,7823,359,74470,5879,1,95645,30,81324,27429,54943,10733,30506,57979,224,50073,9820,27,80129,26653,225,72583,16272,4411,29,18087,41646,224,33414,6713,224,50073,58361,362,54009,95645,225,52050,80129,46212,359,52050,59324,1,60605,30,27606,45035,91542,88857,18087,64529,224,29478,53596,23185,224,3697,41373]},awcbb_yhh_fun74:{variablePool:{t:null,e:null,n:null,o:null},zhili:[55451,15774,175,23282,46212,27,64147,6713,175,22541,70757,85948,7,59830,30,9103,9820,175,52980,77007,47002,7,46212,95,84670,7823,175,58752,70757,9409,8,59830,27,15357,5497,44194,175,29478,70757,80250,5,33414,40180,175,23098,13630,45162,2,74241,3,57979,365,99221,62566,366,72623,56565,218,99151,46212,367,62205,46212,368,32736,80796,369,43150,95645
,370,66595,78762,371,3410,43071,372,57126,15774,373,89199,43071,374,46782,64954,375,89199,84670,37998,247,34933,97059,218,82317,76204,58960,329,77380,67404,376,74470,63363,1,61752,97059,175,29478,64153,38959,329,43193,95311,376,77380,97127,1,36842,84670,32545,329,22745,95311,377,48530,69977,1,61752,15795,225,19853,23185,247,58752,33414,64954,225,52980,71658,57979,323,58960,119,64153,32545,218,77380,88198,26653,225,61091,63531,9820,329,563,47300,330,52396,52058,2,33414,32545,175,58854,56838,62566,378,61091,41411,1,81324,44291,36151]},awcbb_yhh_fun75:{variablePool:{t:null,e:null,n:null,o:null,awcbb_yhh_fun76:"awcbb_yhh_fun76",a:null,i:null},zhili:[44194,382,12239,97059,175,38959,218,72238,60605,95,34933,18922,175,22745,70757,47229,7,97059,30,53596,95311,175,29478,77007,51849,2,60763,3,59830,30,45243,13261,64277,75559,247,11765,64954,382,23098,97127,0,64822,6,72623,41646,302,99362,26012,0,72623,59830,302,72583,26012,0,54009,79263,225,39561,64277,44194,225,48530,46212,313,71807,98239,0,95311,314,61752,38959,247,99362,9820,315,74470,72218,2,24937,44194,119,44588,32545,225,30033,15774,316,53955,63363,0,27606,67170,41646,247,3697,18922,315,93963,69977,1,84051,88198,43071,225,30033,60605,317,71807,91504,0,19853,74112,247,74470,18922,315,71658,58767,1,84051,58511,51516,224,10151,5497,62566,225,563,46212,318,52050,95012,0,67170,67404,247,3697,6713,315,58854,90610,1,77548,63531,23185,225,77380,59830,319,71807,940,0,53596,44194,247,53955,18922,315,52050,89391,1,43150,54009,62566,225,52980,23185,320,29478,55207,0,84670,62566,247,23098,95311,315,52980,59324,1,224,68343,74976,135,67170,18922,225,72389,23185,317,10810,31162,0,53320,16,43071,383,88198,26653,384,30033,44194,78,71658,74677,1,14239,62566,30,60763,106,53596,47300,218,34933,67404,218,99362,94604,2,58960,385,38232,14608,386,334,80129,64954,135,43193,57979,121,53955,51289,387,334,84670,57979,135,48530,44194,119,72583,39365,388,334,5497,38959,135,10057,80796,27,52050,4267,389,334,63531,38959,224,22541,18922,121,53955,16751,
390,334,7651,46212,224,50073,78762,119,99362,62720,391,334,30506,57979,224,10810,44194,27,74470,67170,64954,218,10057,62566,335,71807,91090,2,80796,335,71658,97127,2,40180,335,53955,31162,2,23185,335,52980,31787,2,80796,335,71658,72218,2,41646,335,43193,27118,2,35812]},awcbb_yhh_fun76:{variablePool:{},zhili:[70676,71340,61752,80796,175,52980,39732,5185,63012,2,73062,45,85408,46212,175,22541,8414,57979,134,48530,76020,1,54335,28,60605,381,34933,62566,175,61091,78321,99621,45162,6,34933,6713,175,53955,44039,10,11765,41646,175,61091,53596,57979,359,72583,91504,1,77765,4,33414,38959,175,563,21204]},awcbb_yhh_fun77:{variablePool:{t:null,e:null},zhili:[17105,44194,175,34962,39365,394,30,18087,41837,218,85408,58361,175,10057,33414,62566,218,72389,6713,395,72583,59324,1,45243]},awcbb_yhh_fun78:{variablePool:{t:null,awcbb_yhh_fun79:"awcbb_yhh_fun79"},zhili:[18922,400,5169,64529,175,96088,62566,398,88198,95645,400,72583,85408,6713,215,52050,26653,401,93963,31787,2]},awcbb_yhh_fun79:{variablePool:{},zhili:[65870,93730,8414,64954,215,10057,41646,399,29478,5497,26653,175,48530,58767,1]},awcbb_yhh_fun80:{variablePool:{t:null,e:null,n:null,awcbb_yhh_fun81:"awcbb_yhh_fun81"},zhili:[15774,409,15348,44194,175,64529,218,84405,41646,404,76204,15774,175,29478,1014,76204,97059,284,71658,95311,285,87026,32545,349,58854,29362,1,64153,93083,247,9103,23185,247,50073,64650,16816,17,72623,26653,284,53955,95645,405,58854,74677,0,19853,40180,247,71807,74112,406,71658,69186,96274,16,5497,95311,218,43193,46212,407,9103,41646,247,72389,67404,408,57404,88857,18631,28,8414,7823,409,52396,41646,410,68343,46212,175,29478,59830,411,83270,27429,7651,26653,355,10057,95311,349,99362,7790,1,9820,81,71807,95012,1]},awcbb_yhh_fun81:{variablePool:{o:null,a:null},zhili:[70676,97059,225,9832,34933,32545,218,71807,23185,407,9103,58361,225,52980,6713,73,30033,95311,408,99362,38232,62733,95645,408,64153,47300,225,10057,26653,73,77380,44194,408,22745,61744,80796,406,6713,305,26653,306,6713,306,43444,37458,76204,9705
9,284,99362,6713,405,71807,52058,0,44291,16485,72623,93083,224,60605,404,53596,58361,175,52396,45035,68343,60605,224,43193,61752,38959,284,72389,78762,285,563,97059,352,72583,74677,2]},awcbb_yhh_fun82:{variablePool:{t:null},zhili:[64106,46212,175,38549,60605,27,25016,85728,59830,175,72389,98535,53320,2,52189,6,53596,44194,175,97059,30,82315,9103,95311,175,50073,78e3,10,44588,58361,175,23185,414,28050,63531,67404,175,3697,85408,9820,302,58854,46283,0,26653,303,30033,31162,0,57979,324,47300,27,15774,314,15774,415,67404,416,85728,38959,329,71658,60605,417,57404,29585,0,3511,7823,160,10057,52058,1,41646,418,23098,27118,2,5497,64954,175,58752,27606,28156,68644,48422]},awcbb_yhh_fun83:{variablePool:{t:null},zhili:[3306,9832,85728,23185,302,93963,81055,0,76204,62566,302,77380,58960,276,29478,940,1,58511,89106,175,44194,307,34933,64954,175,48530,62917,11765,59830,359,57404,91090,1,19567]},awcbb_yhh_fun84:{variablePool:{t:null,_t$split:null,e:null,n:null,o:null,a:null,i:null,d:null,f:null,p:null,b:null},zhili:[33338,80124,6713,307,88198,67404,284,57404,95311,405,77380,90610,0,4804,80796,422,58511,7823,284,50073,62566,380,50073,31787,2,19853,75009,175,38959,324,2219,41646,175,29478,74112,423,52980,27118,1,61752,75559,424,53596,62566,424,10057,7823,27,58752,72623,75421,218,88198,95645,424,52980,32545,119,93963,61752,75009,247,22433,62566,424,93963,95311,121,10810,85728,43021,225,19853,18922,218,52396,19853,95311,247,3697,6713,27,72623,58960,302,48530,17482,3,67170,65921,224,64277,41646,224,19853,40180,224,22745,44194,317,29478,47409,0,80611,8414,64529,218,3697,80129,93083,135,64529,119,30506,58960,247,563,61752,26653,359,57404,74677,1,15713,33414,37998,229,6713,425,18087,23185,229,71658,34266,32275,25,63531,59830,135,67404,119,33414,18922,135,57404,30506,43071,359,52980,58767,1,65501,8087,85728,62566,229,74112,119,78309,67404,95,85728,44194,225,23098,76204,93083,274,84670,95645,135,10057,58511,95311,229,3697,64954,27,64147,95645,302,58854,39085,3,2219,74976,277,68343,18922,277
,58511,18922,277,22745,43071,317,52980,76020,0,82315,76204,15774,277,77380,85408,38959,274,52396,34213,4411,12,80129,44194,274,5497,38959,277,72583,27781,30506,9820,274,29478,95645,328,58511,15774,229,61091,69668,80958,15,64147,38959,229,18087,40180,229,93963,38959,327,54943,30381,11765,32545,229,87026,38959,95,88198,26653,274,23098,38959,324,84670,47300,229,30033,60605,324,19853,78762,135,77380,27606,83270,54943,54943,18087,29242,426,80129,38959,426,10057,66353]},awcbb_yhh_fun85:{variablePool:{t:null,e:null,n:null,o:null,awcbb_yhh_fun86:"awcbb_yhh_fun86"},zhili:[6713,439,3749,40180,175,57979,218,47300,247,23282,5497,62566,429,82317,34458,0,30506,56565,225,33414,44194,439,61091,19853,80796,175,58752,38959,440,30033,97127,1,69301,34933,78762,225,52980,58735]},awcbb_yhh_fun86:{variablePool:{a:null},zhili:[80562,43071,224,26569,15774,27,83991,88198,43071,247,10057,81616,50472,31,30506,44194,224,57404,44194,430,57979,307,54009,47300,224,57404,43071,430,29478,43444,44194,27,13630,53596,18922,284,74470,97059,301,22541,91504,2,28050,63144,28,5497,62566,224,52396,46212,430,95645,307,54009,41646,224,99362,47300,430,72583,19398,41646,431,8414,58960,284,10057,15774,380,50073,29362,2,88857,54009,40180,224,57404,64954,432,89786,98870,52,54009,26653,224,77380,67404,432,58960,27,54009,64954,224,71807,38959,432,58854,25008,22695,4,57979,433,76073,22,97059,307,80129,46212,224,77380,95645,432,61091,45915,43071,27,5185,53596,58361,284,50073,7823,301,74470,91090,2,50046,2219,58960,224,50073,40180,432,99362,85728,74112,224,22541,41646,434,31296,80958,52,8414,95311,224,93963,47300,434,26653,27,67170,59830,224,72389,57979,434,22541,3604,22695,4,43071,433,86650,22,44194,307,72623,95311,224,87026,57979,434,82317,4804,80796,27,56369,30506,95645,284,57404,67404,301,29478,52058,2,27557,30506,62566,224,87026,78762,434,22541,61752,18922,224,3697,15774,435,59809,69685,10,95645,27,58511,46212,224,53955,59830,435,53955,21612,45295,69,53596,6713,284,58854,64954,405,10810,52058,0,2219,15774,224,43193
,78762,435,50073,55038,70299,11,58511,15774,224,52050,41646,436,64954,30,27557,20376,9,19853,95645,224,53955,9820,436,6713,437,11748,76204,57979,224,93963,38959,438,80796,307,22433,43071,224,72583,67404,435,563,3511,58960,422,53596,7823,284,74470,43071,380,29478,940,2,30381,41646,95,46212,15,64147,67404,218,10810,49680,7787,25072,10,58511,64954,224,22745,80129,59830,218,43193,7790,1,11765,15774,224,61091,19853,62566,225,93963,43071,126,74470,7790,1]},awcbb_yhh_fun87:{variablePool:{t:null,e:null,n:null,o:null},zhili:[33338,72238,80129,44194,302,29478,59088,0,5497,94555,175,19853,59830,175,30033,23185,313,57404,89391,0,5497,75421,218,60605,119,8414,80796,175,53955,78762,316,48530,69977,0,26909,2219,43021,247,22433,23185,175,58752,7823,317,3697,97127,0,64153,65802,225,63531,97059,225,99362,26653,324,58511,43071,247,29478,40180,324,11765,47300,218,30033,1014,19919,51599,13263,99221]},awcbb_yhh_fun88:{variablePool:{t:null,e:null,a:null,n:null,o:null,_unused:null},zhili:[5169,97059,175,48183,9820,119,33501,6713,337,33414,78762,175,57404,18922,342,50073,68185,1,57724,22695,2,20376,79,9820,337,54009,47300,175,93963,76204,95311,326,52980,29362,1,43071,423,72583,7790,1,44588,56565,218,63531,9820,218,72389,32545,27,22745,54009,7823,284,3697,58361,285,82317,97059,349,93963,31162,1,53596,37998,224,67170,78762,224,50073,21105,12,11765,67404,224,71807,11765,62566,218,77380,7823,119,52396,58854,11383,4,23185,30,18564,12,80129,95645,224,52050,76204,57979,218,72389,67404,119,10810,52980,45607,11765,64954,175,77380,9103,23185,444,58854,18922,445,10810,63363,1,80129,8184,247,15774,30,11765,65802,225,80983,22,13,1,30506,78762,225,54009,23185,247,29478,8414,23185,275,58854,59830,276,93963,90610,1,64822,2,9820,30,80611,966,22433,23185,446,55424,85408,67404,225,44588,67404,247,50073,80611,36151,58735,76204,9820,225,43193,70130]},awcbb_yhh_fun89:{variablePool:{t:null,e:null,n:null,o:null},zhili:[96628,23185,175,40180,218,21418,78762,119,29,60605,337,22433,32545,175,29478,47300,342,10057,271
18,1,28557,79810,45,34933,57979,175,53955,64954,74,2219,62566,218,58752,69086,89595,78395,6,85408,18922,218,23098,64476,13,88198,41646,218,87026,7651,58960,275,23098,23185,448,10810,97127,1,19853,44194,444,71658,38959,449,22541,2732,2,38302,52878,84,60605,337,53596,26653,175,30033,67170,78762,326,99362,940,1,58960,423,72583,59324,1,88198,15308,247,64277,38959,247,87026,64954,27,52050,64153,40180,284,58752,18922,285,58854,38959,349,3697,69977,1,85699,1,89401,7651,96735,225,61752,57979,225,99362,44588,74112,247,58752,23185,119,563,19853,80796,218,3697,93438,2219,97059,247,52050,44194,27,58854,67170,64529,225,29478,18087,67404,284,77380,46212,285,72583,97059,352,53955,76020,2]},awcbb_yhh_fun90:{variablePool:{t:null,e:null,n:null},zhili:[5169,64954,175,26569,32545,27,78039,85728,41646,175,58854,44575,79810,2,44039,6,63531,62566,175,40180,30,80837,58361,119,7289,9820,337,58511,15774,175,30033,58960,342,23098,31162,1,77425,25979,33,80129,58361,175,93963,66558,11,63531,9820,444,48530,64529,451,10057,97127,0,52878,13,68343,32545,175,71658,22433,64954,444,22745,40180,452,43193,55207,1,21204,86650,97,64954,337,76204,95311,175,87026,54009,23185,326,61091,27370,1,58960,423,23098,7790,1,34933,93083,218,64153,57979,218,563,26653,27,99362,18087,58960,284,22541,44194,285,57404,26653,349,74470,91090,1,80250,1,51153,44588,75559,247,53596,40180,247,22541,34933,9820,218,99362,26653,119,43193,71817,25072,12,68343,9820,247,22541,9103,58361,218,52050,6713,119,93963,83965,45295,23,85408,95311,218,3697,95311,27,77380,2219,46212,247,61091,84670,74112,284,10057,58960,285,72389,7823,352,61091,31787,2,45607]},awcbb_yhh_fun91:{variablePool:{t:null,e:null,n:null,o:null,a:null},zhili:[26779,47300,175,50825,64147,57979,302,57404,23171,0,18087,89106,218,61752,59830,175,43193,67170,78762,218,77380,74112,316,22745,91504,0,91542,2219,41646,218,52396,78762,455,22541,27118,1,19853,18922,218,87026,95645,313,30033,29585,0,9103,64189,247,64529,119,11765,60605,218,61091,15774,316,74470,29362,0,54943,44588,49
304,225,88198,43071,218,74470,59830,317,52980,47409,0,64277,75559,224,23185,328,7651,7823,225,43193,65063,33970,15,85728,95311,225,5497,78762,225,57404,58361,327,28156,8087,53596,80796,225,43193,74112,328,58511,32545,224,22745,34987,28741,15,80129,41646,224,64147,7823,224,52050,78762,327,13263,8080,19853,46212,224,53955,30506,78762,224,72583,97059,324,88198,32545,225,87026,60605,324,67170,62566,247,43193,19919,79230,94863,83270,35812]},awcbb_yhh_fun92:{variablePool:{t:null,e:null,n:null,o:null},zhili:[17105,40180,175,78762,218,23282,5497,9820,175,22433,18922,175,563,22433,60605,302,52980,38959,276,53955,95012,1,8080,63531,18922,218,34933,64529,218,10057,7651,26653,302,72583,95311,276,52980,90610,1,80837,95311,95,64153,80796,175,99362,68343,15774,218,72583,76288,64277,6713,329,57404,47300,458,30033,74677,1,80129,15795,247,62566,307,32545,459,7823,305,4067,37209,2219,64529,247,10810,15514,22433,18922,329,82317,67404,377,52396,47409,1,61752,29242,225,54009,6713,225,52980,88999]},awcbb_yhh_fun93:{variablePool:{t:null,e:null,n:null,o:null,a:null,cbbiyhh:null},zhili:[32517,62566,175,80796,218,70261,76204,95645,175,52396,67404,461,50073,40335,4,80129,97059,175,71807,76204,25791,247,95311,27,72623,43021,221,53596,80796,218,29478,38959,133,3697,64147,97059,221,77380,18864,26420,80,2219,18922,218,74470,85408,64954,275,61091,41646,448,71658,74677,1,80129,44194,462,58854,32545,376,52050,63363,1,80129,62566,225,72623,47300,218,99362,7651,62566,221,10810,48530,97059,27,52050,53012,85408,97059,224,67170,97059,218,58752,64147,18922,221,43193,22541,26653,119,99362,5659,64277,78762,247,50073,44588,44194,225,10057,67170,32545,224,99362,56020,19853,26653,221,44194,119,44588,64954,221,87026,78302,20350,63144,-94,85728,6713,247,22541,66353]},awcbb_yhh_fun94:{variablePool:{t:null},zhili:[39441,97059,175,71340,7823,465,64277,32545,466,71658,2732,1,62221,80129,95645,175,54009,64529,175,72583,98239,0,10733,68343,38959,467,48530,90610,0,30238,61752,62566,175,57404,93555]},awcbb_yhh_fun95:{var
iablePool:{},zhili:[68149,21418,43071,477,61449,57979,470,74112,478,61744,47300,95,60605,119,33501,64147,95311,479,29478,47409,4,10515]},awcbb_yhh_fun96:{variablePool:{t:null,e:null,n:null,o:null,awcbb_yhh_fun97:"awcbb_yhh_fun97",awcbb_yhh_fun98:"awcbb_yhh_fun98",awcbb_yhh_fun99:"awcbb_yhh_fun99"},zhili:[40180,498,23185,500,46212,568,26472,41646,175,41474,34933,57979,493,58752,95012,0,44588,75009,218,87764,59830,407,62566,30,16485,72623,60605,494,22745,69977,1,30506,64189,247,22433,38959,498,23098,9103,78803,225,30506,57979,500,22541,58511,18922,501,50073,91090,1,60380,54009,95311,568,43193,10515]},awcbb_yhh_fun97:{variablePool:{},zhili:[43254,38549,9820,27,61752,15774,218,10057,64529,89,30033,95311,495,71807,97059,133,10057,66511,98029,13,46212,496,57979,497,64277,6713,218,30033,46212,293,61091,27118,2]},awcbb_yhh_fun98:{variablePool:{},zhili:[39441,33529,23185,499,33414,26653,247,48530,64277,38959,284,87026,40180,403,3697,72218,2]},awcbb_yhh_fun99:{variablePool:{a:null,i:null,d:null,f:null,p:null,b:null,r:null,awcbb_yhh_fun100:"awcbb_yhh_fun100"},zhili:[67404,567,66932,38959,224,18922,135,86572,2219,43071,502,52980,19853,15795,229,8414,64529,503,50073,88198,75559,274,76204,43071,504,22745,80129,74976,277,63531,47300,505,22541,84670,89106,426,18087,60605,506,53955,72623,89106,507,84670,26653,508,77380,27370,0,94956,5497,44194,507,58854,76029,46212,509,56027,6713,510,78762,327,60637,60637,78733,59830,511,68343,95645,567,3697,53596,74112,521,93963,27370,1,21585,18922,508,58960,119,75467,44588,74112,334,87026,31787,3,70130]},awcbb_yhh_fun100:{variablePool:{awcbb_yhh_fun101:"awcbb_yhh_fun101",awcbb_yhh_fun106:"awcbb_yhh_fun106",awcbb_yhh_fun107:"awcbb_yhh_fun107"},zhili:[57979,543,44194,547,15774,566,2158,84405,30711,26653,512,67170,23185,472,22745,99151,44588,46212,274,82317,6713,95,76029,6713,511,8414,9820,543,99362,64277,41646,521,77380,31162,1,96762,74112,508,18922,119,33352,72623,6713,544,50073,89391,3,80570,19853,97059,479,99362,90610,3,24937,78762,512,64153,8079
6,489,52396,3933,61752,43071,426,30033,68012,58361,545,64153,15774,218,93963,64153,7823,514,53955,29362,1,60605,89,10810,32545,495,52980,16485,58960,546,5497,47300,135,71658,18922,27,10057,97413,18,11765,58361,135,22541,67404,27,64153,60605,547,74470,56020,85408,44194,135,58752,64954,27,71658,16485,40180,548,95645,30,44605,74112,549,80796,550,44605,64529,551,46212,552,33352,78762,553,9820,554,24361,15774,555,74112,30,82077,9820,556,85728,32545,218,50073,61752,41646,514,52050,91504,1,97059,89,563,41646,527,52050,95311,528,52050,22061,38959,557,47300,119,85288,64529,558,34933,67404,225,72583,32881,18922,559,62205,76204,95645,560,22541,29362,2,60637,89401,97059,561,9103,41646,566,72389,68343,43071,521,53955,41411,1,33352,15774,508,58361,119,96225,7823,535,67718,40180,545,70041,6713,556,3410,95311,558,84214,67170,23185,544,23098,69977,5,47670,88198,59830,479,82317,89391,3,80570,2029]},awcbb_yhh_fun101:{variablePool:{awcbb_yhh_fun102:"awcbb_yhh_fun102",awcbb_yhh_fun104:"awcbb_yhh_fun104",awcbb_yhh_fun105:"awcbb_yhh_fun105"},zhili:[46212,523,47300,539,9820,542,83875,96088,48503,6713,27,5347,68343,97059,508,71658,29362,1,15276,72623,80796,513,52396,7823,95,19853,60605,218,52396,33414,60605,514,52980,97127,1,95311,89,72389,40180,515,22745,64277,15774,523,52396,11765,78762,524,3697,55207,2,23185,525,2219,95645,526,71807,47409,4,46792,5497,46212,218,82317,8414,9820,514,50073,7790,1,46212,89,77380,64954,527,23098,95311,528,71658,9169,13,58960,30,47300,27,65991,58511,46212,529,87026,59324,2,55098,62,9103,80796,508,57404,91090,0,45196,33414,9820,229,58854,15979,44194,10,59830,27,22061,58361,470,40180,530,71606,6713,531,74112,532,61744,6713,533,43071,415,72563,74112,534,62566,535,85288,76035,95645,511,34933,60605,539,43193,61752,95311,521,23098,7790,1,33352,58361,508,47300,119,71606,80129,95645,334,48530,63363,3,47670,23185,27,58511,32545,218,29478,64153,97059,514,563,59324,1,74112,89,50073,44194,515,93963,57979,133,58752,73580,35837,13,60605,30,64529,27,39076,85408,26653,529,539
55,45100,2,13780,52,19853,23185,508,87026,2732,0,72601,7651,78762,229,61091,15979,64954,10,32545,119,4486,9820,477,46212,305,72563,80796,470,62566,540,44298,41709,46212,511,58511,60605,542,22541,44588,64954,521,48530,69977,1,9206,64954,508,58361,119,35714,30506,15774,334,50073,41411,3,318,5377]},awcbb_yhh_fun102:{variablePool:{c:null,B:null,awcbb_yhh_fun103:"awcbb_yhh_fun103"},zhili:[95311,520,64106,46212,516,58361,367,55173,68343,44194,508,29478,89391,0,15276,5497,46212,229,71807,15979,44194,477,58960,305,64879,62566,10,61752,46212,367,58752,72563,74112,470,35896,18922,517,3410,2219,59830,516,48530,32545,48,23098,591,53596,38959,518,72583,5879,1,85288,63903,40180,511,88198,43071,520,71658,54009,15774,521,99362,63363,1,21585,15774,508,6713,121,33352,74112,522,32881,57979,470,70041,72623,57979,334,72389,27118,5,41373]},awcbb_yhh_fun103:{variablePool:{},zhili:[33338,86572,67718,38959,512,5497,74112,474,52980,64153,38959,516,72389,67404,294,563,88198,43071,519,30033,72218,1,47300,119,53596,78762,479,52396,69977,4,17775,88999]},awcbb_yhh_fun104:{variablePool:{},zhili:[83875,70261,71527,6713,512,85728,40180,476,52050,3933,34933,58960,481,77380,40640,62566,477,18087,64954,483,58752,68343,38959,218,58752,7651,43071,514,52396,69977,1,64954,89,74470,95311,536,58854,6713,537,72389,85728,57979,519,10057,68185,1,44194,119,11765,58361,479,71807,63363,4,3410,54009,32545,479,99362,55207,3,47670,58960,512,64147,46212,485,72583,22433,38959,218,52396,85408,46212,514,50073,41411,1,57979,89,23098,64529,538,82317,32545,408,93963,64153,32545,519,99362,31162,1,58361,119,9103,60605,479,82317,91090,4,224,58735]},awcbb_yhh_fun105:{variablePool:{},zhili:[87017,71340,39561,95311,512,29172,57979,470,64529,541,24361,23185,486,33414,38959,247,53955,58361,407,72389,44298,95311,95,64529,535,88198,38959,487,99362,64153,64954,479,72583,940,5,71341,67717]},awcbb_yhh_fun106:{variablePool:{c:null},zhili:[70676,74112,516,93730,68343,38959,218,71658,53596,64529,514,23098,74677,1,95645,89,74470,95311,495,3
3414,44194,516,52980,50046,21204]},awcbb_yhh_fun107:{variablePool:{awcbb_yhh_fun108:"awcbb_yhh_fun108"},zhili:[58361,565,43254,84405,10151,19853,23185,277,57404,83815,58960,562,84670,44194,225,10057,60637,44194,528,30506,46212,218,52050,80129,7823,514,71807,41411,1,95645,89,22541,32545,527,3697,47300,528,93963,92215,9820,556,88198,32545,218,48530,53596,95645,514,23098,90610,1,40180,89,563,62566,527,58752,74112,563,93963,59137,87764,46212,511,61752,6713,565,22745,88198,67404,521,87026,76020,1,21585,18922,508,64954,119,92215,6713,535,62247,15774,528,17775,44194,556,8934,63531,80796,544,10810,31162,5,17775,53031]},awcbb_yhh_fun108:{variablePool:{},zhili:[3749,13021,93754,61752,43071,218,10810,7651,74112,514,77380,52058,1,60605,89,71807,44194,527,61091,62566,408,74470,85408,58960,519,99362,29585,1,26653,119,64153,64954,564,77380,58767,2,71341,66353]},awcbb_yhh_fun109:{variablePool:{t:null,e:null,n:null,awcbb_yhh_fun110:"awcbb_yhh_fun110",o:null,awcbb_yhh_fun111:"awcbb_yhh_fun111",a:null,awcbb_yhh_fun114:"awcbb_yhh_fun114",i:null,awcbb_yhh_fun118:"awcbb_yhh_fun118",d:null,awcbb_yhh_fun119:"awcbb_yhh_fun119",f:null,awcbb_yhh_fun122:"awcbb_yhh_fun122",p:null,awcbb_yhh_fun124:"awcbb_yhh_fun124",b:null,awcbb_yhh_fun126:"awcbb_yhh_fun126",awcbb_yhh_fun127:"awcbb_yhh_fun127"},zhili:[47300,602,43071,611,58960,620,97059,623,57979,636,57979,648,67404,651,15774,656,74112,702,83875,57979,175,93730,31460,64954,283,7823,30,85288,47300,587,97059,30,59137,58960,282,18922,30,21585,47300,588,68012,78762,528,15774,119,13630,25022,47300,563,43071,119,71268,16485,18922,408,41646,589,85288,34264,47300,590,76035,57979,528,26653,119,1828,96762,26653,563,60605,119,44380,33352,64529,408,44194,591,97453,64879,95311,592,87764,80796,593,62566,95,44298,44194,594,41646,306,64879,82077,60605,595,26653,30,92215,80796,596,95311,30,44605,9820,597,47300,30,60637,95311,598,6713,27,4486,23185,599,97059,30,96762,47300,600,74112,30,72563,64277,64529,494,29478,940,1,80129,37998,218,68343,60605,493,29478,91090,
0,44588,8158,247,58511,41646,602,3697,64147,46212,501,72583,95012,1,84670,44194,611,29478,64277,99730,225,34933,6713,620,563,64277,51516,224,61752,6713,623,53955,64153,89542,135,68343,9820,636,57404,34933,43021,229,68343,80796,648,87026,2219,78803,274,30506,58361,651,52050,18087,74976,277,84670,64529,656,61091,68343,29242,426,64153,15774,702,72389,93555]},awcbb_yhh_fun110:{variablePool:{},zhili:[65870,38549,80129,32545,218,52396,7823,283,26653,283,76204,58960,284,43193,41646,285,58752,64529,349,58854,91090,1,30381,30506,44194,218,10057,23185,595,95311,595,64147,6713,284,23098,64954,285,77380,41646,349,61091,31787,1,8080,68343,32545,218,87026,47300,596,78762,596,88198,60605,284,22541,15774,285,48530,64954,349,82317,7790,1,80837,76204,47300,218,563,58960,283,30033,7315,6,19853,59830,277,72389,90610,0,95311,601,7823,586,53596,78762,247,52980,38959,267,52050,45100,2]},awcbb_yhh_fun111:{variablePool:{awcbb_yhh_fun112:"awcbb_yhh_fun112",awcbb_yhh_fun113:"awcbb_yhh_fun113"},zhili:[80796,607,44194,608,94285,86572,38959,603,9103,64529,218,22541,44194,587,53955,95311,133,52396,23825,78395,2,34299,12,43071,604,30506,58960,297,61091,18922,78,30033,69977,1,45607,26653,124,5497,59830,218,61091,60605,282,99362,58960,133,87026,86716,54335,2,45016,12,80796,605,72623,80796,297,72583,44194,78,93963,2732,1,48422,58511,26653,218,74470,64529,588,10057,64529,528,95311,27,64965,38232,80129,64954,218,29478,64954,588,10057,23185,563,44194,27,5886,85445,30506,64529,218,52050,59830,588,58752,59830,408,62566,606,38232,85408,78762,607,52050,2219,60605,608,87026,46212,609,26396,41646,587,58511,9820,218,72389,57979,587,30033,35714,6713,282,63531,78762,218,52050,43071,282,87026,96762,61752,64954,355,23098,15774,610,50073,47409,2,15774,81,30033,31787,1,18922,199,50073,98239,1]},awcbb_yhh_fun112:{variablePool:{r:null},zhili:[68328,41646,507,55173,9103,32545,218,58752,57979,588,93963,43071,528,18922,119,44380,78309,63531,15774,218,58854,9820,588,57404,46212,563,80796,119,8578,28050,11765,9820,218,9396
3,64529,588,82317,78762,408,18922,589,4623,97059,95]},awcbb_yhh_fun113:{variablePool:{r:null},zhili:[3749,44194,507,33529,44588,78762,507,23098,7651,57979,135,50073,98239,1]},awcbb_yhh_fun114:{variablePool:{awcbb_yhh_fun115:"awcbb_yhh_fun115",awcbb_yhh_fun116:"awcbb_yhh_fun116"},zhili:[64529,613,9820,618,78808,67320,88198,59830,218,61091,57979,592,48530,47300,593,74470,6568,9169,2,60038,104,62566,603,5497,46212,218,30033,6713,587,30033,32545,133,74470,85353,22695,2,13780,12,78762,604,30506,23185,297,71658,40180,78,48530,69977,1,21204,68343,43071,218,61091,80796,590,72583,18922,563,32545,27,64965,38232,85728,38959,218,50073,95645,590,52396,26653,528,80796,27,47,50046,9103,40180,218,72389,47300,590,71658,58361,408,23185,612,85445,33414,97059,613,52050,67170,43071,618,23098,34933,26653,218,30033,7823,587,48530,64954,619,51599,64277,64954,355,50073,7823,349,22541,69977,1,46212,81,74470,97127,1,41646,199,48530,97127,1]},awcbb_yhh_fun115:{variablePool:{r:null},zhili:[87017,9820,507,33529,88198,6713,218,58752,67404,590,58854,6713,563,15774,119,5886,8080,63531,95645,218,22541,64529,590,82317,74112,528,60605,119,64650,93438,33414,95645,218,30033,6713,590,563,38959,408,47300,591,5659,95645,95]},awcbb_yhh_fun116:{variablePool:{r:null,awcbb_yhh_fun117:"awcbb_yhh_fun117"},zhili:[60605,615,5169,26653,507,71340,8414,95311,218,52396,18922,592,10057,18922,593,53596,58361,615,30033,47300,307,44588,18922,616,71658,47409,2,80611,61752,58361,218,71807,74112,590,87026,57979,528,23185,119,5347,87350,11765,26653,507,3697,95645,294,57404,18777,2,74112,617,58511,47300,297,87026,38959,299,93963,97127,1]},awcbb_yhh_fun117:{variablePool:{},zhili:[66932,80124,30506,74112,218,52050,74112,592,57404,95311,594,58960,119,22433,64529,218,10057,74112,592,50073,67404,594,22541,21798,55019,84581,9103,6713,218,93963,23185,590,71658,62566,408,63531,67404,218,48530,7823,592,30033,62566,594,48530,30381,80796,27,64277,9820,218,52980,67404,592,22541,58960,594,52980,73580,27998,44,84670,40180,218,3697,64954,592
,22541,38959,593,71807,85728,74112,614,99362,72218,1,41632,9103,18922,218,82317,57979,590,71807,44194,408,47300,591,52821,2219,23185,218,53955,57979,590,93963,40180,563,64954,119,83991,56020,32545,95,74112,95]},awcbb_yhh_fun118:{variablePool:{r:null,c:null},zhili:[94285,7823,507,9832,5497,78762,507,82317,74112,73,52050,27998,10,5497,44194,507,58752,46212,73,10810,6713,283,3697,25979,2,24153,114,15774,287,89401,74112,587,72623,95311,218,52396,59830,587,48530,71093,11765,6713,284,99362,46212,285,43193,6713,352,87026,7790,2,54009,32545,507,52050,58361,73,52396,89019,62566,27,88198,29242,165,19853,95311,165,10057,63531,43071,166,10810,9820,133,71807,24653,67541,46,64147,40180,516,64153,67404,166,48530,64277,58361,165,52050,72389,71121,44588,40180,516,10810,61752,15774,507,22745,97059,73,3697,64153,97059,516,48530,74470,2219,64954,284,3697,58960,285,22745,67404,352,3697,69977,2,76204,56123,165,52878,-60,54009,6713,507,52396,40180,294,53955,34933,58361,297,58752,15774,299,71807,47409,1,76204,46212,277,71807,27370,0,60380,6713,601,7823,491,64147,41646,247,22745,67404,267,10057,91090,2,30238,15774,621,76204,38959,289,23098,6713,126,71807,97127,1,69301,23185,622,41646,527,5497,9820,247,71807,58960,267,29478,41411,2,75041,7823,298,53596,57979,247,72583,80796,267,93963,97127,1]},awcbb_yhh_fun119:{variablePool:{awcbb_yhh_fun120:"awcbb_yhh_fun120"},zhili:[67404,628,68149,23282,54009,18922,628,58854,32545,629,81395,6713,630,95311,631,59137,95645,632,6713,633,34264,22433,23185,634,53955,41646,635,72583,31787,2,97059,81,61091,76020,1]},awcbb_yhh_fun120:{variablePool:{awcbb_yhh_fun121:"awcbb_yhh_fun121"},zhili:[47300,626,32517,98809,84670,41646,626,3697,6713,627,19853,59830,355,48530,44194,349,74470,74677,1,78762,81,43193,69977,1]},awcbb_yhh_fun121:{variablePool:{r:null},zhili:[39441,57979,507,98809,18922,283,18087,58361,284,22541,15774,285,74470,9820,286,72389,27118,1,84581,80796,595,85408,26653,284,82317,78762,285,71658,43071,286,82317,98239,1,8902,43071,624,58511,78762,284,48530,
57979,285,72583,60605,286,29478,58767,1,70312,11765,38959,507,53955,62566,294,87026,59592,2,26653,625,34933,97059,297,71658,6713,299,72583,98239,1,72601,2219,6713,218,93963,67404,283,38959,30,52821,67404,95]},awcbb_yhh_fun122:{variablePool:{r:null,c:null,awcbb_yhh_fun123:"awcbb_yhh_fun123"},zhili:[47300,645,36174,62566,507,38549,60605,637,8414,78803,516,9103,38959,426,52980,76020,0,32275,10,68343,43071,516,58361,638,80611,64153,32545,516,30033,53596,6713,218,29478,15774,600,61752,47300,507,50073,85756,85408,67404,645,10810,63531,46212,507,71807,46212,646,61752,43071,516,77380,9820,647,79230,83270,15713,76204,41646,355,52980,18922,349,23098,98239,1,9820,81,74470,58767,1]},awcbb_yhh_fun123:{variablePool:{B:null,M:null},zhili:[80562,59830,367,72115,72623,80796,367,50073,60605,73,3697,95645,292,30033,15764,19,85408,38959,218,53955,58960,599,53596,6713,367,58854,57979,73,10057,6713,639,93963,38354,24153,63,26653,512,64153,18922,215,43193,97059,216,563,63363,1,68343,33043,640,64147,9820,640,57404,59830,486,68343,7823,367,93963,44194,73,99362,43071,292,563,80837,68343,15774,640,77380,88198,74112,215,58854,47300,641,23098,62566,642,61091,95012,1,94831,9103,57979,215,10810,95311,643,52396,15774,27,57404,23185,644,563,72218,0]},awcbb_yhh_fun124:{variablePool:{r:null,awcbb_yhh_fun125:"awcbb_yhh_fun125"},zhili:[62566,649,33338,86572,6713,624,84670,57979,284,72389,62566,285,57404,74112,349,23098,91504,1,54009,33043,507,85728,64954,507,52050,92021,94604,17,44588,26653,284,52050,80796,405,53955,52058,0,18087,6713,507,52980,59830,406,23098,1908,35837,32,22433,97059,218,30033,95645,597,80129,58361,507,10057,47300,408,87026,80611,84670,32545,218,87026,40180,598,64277,18922,507,52980,15774,598,99362,56020,46212,95,24153,20,11765,44194,649,48530,80796,650,80129,46212,355,82317,80796,349,43193,76020,1,41646,81,50073,940,1]},awcbb_yhh_fun125:{variablePool:{c:null,M:null,B:null},zhili:[26472,58960,516,48183,64153,15774,516,72583,69685,7,85408,59830,516,72389,58960,73,82317,70299,2,49852,1
35,8414,43071,516,22541,97059,73,99362,64346,67404,27,22433,74976,165,5497,59830,165,50073,5497,78762,166,3697,58960,133,48530,39404,73911,39,72623,26653,640,30506,78762,166,52050,53596,26653,165,57404,74470,81e3,11765,9820,218,23098,72623,9820,640,23098,85728,41646,516,29478,18922,73,61091,19853,58361,640,52980,99362,85445,80129,38924,165,54324,-53,26396,46212,408,67170,41646,516,58854,64529,73,22745,58960,597,30033,21585,78762,598,54009,62566,516,30033,95645,73,72583,26653,598,52980,21585,95645,406,7823,305,40180,306,62566,306,40894,37209,18087,59830,284,23098,7823,405,53955,89391,0,78302,21585,54009,51516,367,7823,624,11765,59830,367,52980,2219,44194,284,82317,74112,285,43193,41646,352,10057,47409,2]},awcbb_yhh_fun126:{variablePool:{},zhili:[15348,4004,46517,652,135,64147,95311,653,10810,95311,654,71807,57979,655,87026,47409,1,41373]},awcbb_yhh_fun127:{variablePool:{r:null,c:null,B:null,M:null,S:null,R:null,Q:null,G:null,awcbb_yhh_fun128:"awcbb_yhh_fun128"},zhili:[32545,701,12239,64954,507,59830,516,81501,33414,44194,505,61091,64147,74976,367,64153,58361,657,29478,58511,28196,640,44588,58960,504,50073,76204,96735,658,34933,44194,659,52980,5497,64189,660,34933,62566,661,71658,88198,93083,662,85728,26653,506,61091,84670,56565,663,2219,26653,508,82317,29362,0,48861,84670,74112,663,30033,90464,57979,470,67718,18922,475,84214,84670,57979,218,58752,58361,283,72583,73069,4,6713,664,60038,2,62566,30,47670,19853,59830,518,563,31787,1,61744,90464,7823,511,44588,44194,701,43193,11765,44194,521,72389,31787,1,85288,41646,508,67404,119,4486,58960,535,36696,60605,470,3410,64147,59830,334,10057,31162,5,88999]},awcbb_yhh_fun128:{variablePool:{awcbb_yhh_fun129:"awcbb_yhh_fun129",awcbb_yhh_fun138:"awcbb_yhh_fun138",awcbb_yhh_fun139:"awcbb_yhh_fun139",awcbb_yhh_fun140:"awcbb_yhh_fun140",awcbb_yhh_fun141:"awcbb_yhh_fun141",awcbb_yhh_fun142:"awcbb_yhh_fun142"},zhili:[23185,680,6713,693,58361,694,23185,696,6713,697,74112,700,78808,89185,62247,58511,32545,218,22541,58361,283,23098,68180
,58,11765,18922,508,30033,52058,0,48861,38959,512,63531,64954,574,71658,5952,88198,78762,660,87026,87764,95311,665,80796,666,57481,59557,6713,511,34933,43071,680,93963,76204,64954,521,52050,58767,1,96762,95645,508,62566,119,44605,61752,60605,544,563,5879,3,42991,64153,26653,526,48530,59324,3,73062,463,34933,6713,508,71658,98239,0,58306,80796,512,72623,74112,576,48530,96657,18087,7823,218,563,40180,599,52396,11383,13,58361,30,46212,27,5886,85728,62566,529,48530,29585,2,52189,98,72623,26653,508,3697,95012,0,88698,15774,512,67170,95645,578,77380,36696,64529,477,58361,95,67404,681,44194,682,64147,46212,218,563,18922,600,22745,76996,22695,4,18922,683,76073,2,58361,684,58511,60605,519,57404,29362,1,7823,685,26909,44291,47300,119,44588,38959,479,563,5879,4,46782,64153,47300,662,50073,81395,32545,470,64954,686,61744,78762,687,5497,80796,218,10810,38959,599,22541,64879,64954,95,64529,535,3933,80796,687,42991,64277,74112,544,52980,45100,5,77548,7651,74112,526,29478,58767,3,46792,67404,512,33414,38959,580,22541,54009,62566,218,43193,64954,596,10057,88198,18922,519,72389,69977,1,47300,688,28156,26653,119,68343,26653,479,72389,31787,4,66595,9820,512,90191,97059,470,47300,689,44298,47300,486,76204,7823,218,23098,58960,597,22745,57481,58361,95,47300,535,72623,57979,581,22541,30506,64954,479,71658,91504,5,3410,46212,690,60605,95,62566,691,67404,258,63531,78762,218,93963,95311,598,57404,53903,53596,80796,519,52396,2732,1,23185,692,45035,18135,58361,119,64153,7823,479,87026,29362,4,42991,60605,512,33414,47300,583,71658,52861,8414,57979,658,99362,76029,26653,48,6713,677,44605,95645,562,68343,67404,516,71807,7823,121,23098,85948,18,33414,64529,516,22745,95311,121,33414,64529,693,77380,11748,11765,67404,516,99362,32545,121,93963,75467,32545,549,78762,550,82077,89873,23185,511,34933,32545,694,58854,54009,6713,521,77380,91504,1,85171,78762,508,40180,119,85288,64277,59830,544,10057,45100,3,57126,30506,95311,658,77380,56027,18922,48,43071,299,24361,18922,562,58511,41646,516,87026,41646,124,
72389,1853,18,19853,7823,516,61091,46212,124,53596,47300,696,72583,38232,61752,47300,516,3697,60605,124,10057,82077,38959,549,44194,550,75467,83815,23185,511,80129,32545,697,10810,5497,62566,521,22541,98239,1,71093,59830,508,9820,119,96225,34933,57979,544,53955,27370,3,17775,63531,41646,479,77380,5879,3,318,59830,512,22433,58361,585,29478,45787,8414,64954,658,43193,63903,26653,48,95311,677,57481,80796,698,58960,30,9206,97059,562,53596,40180,229,99362,33352,58361,549,9820,550,71606,14266,18922,511,84670,9820,700,71807,44588,97059,521,93963,68185,1,4486,32545,508,38959,119,82077,53596,78762,544,563,940,3,98009,63531,74112,479,99362,59324,3,80570,18087,78762,526,72389,97127,3,40640,70130]},awcbb_yhh_fun129:{variablePool:{awcbb_yhh_fun130:"awcbb_yhh_fun130",awcbb_yhh_fun132:"awcbb_yhh_fun132",awcbb_yhh_fun136:"awcbb_yhh_fun136"},zhili:[95311,671,32545,676,64954,679,33338,38549,39834,2219,44194,640,71807,87764,67404,667,58960,668,33352,82265,62566,511,85728,46212,671,43193,85408,58361,521,52396,58767,1,64879,67404,508,9820,119,99379,85408,64954,544,57404,89391,3,40640,2219,40180,640,82317,68012,78762,667,9820,672,87847,59557,80796,511,7651,64529,676,22541,63531,26653,521,82317,91504,1,24361,58960,508,46212,119,35714,18087,78762,544,563,31162,3,71341,85408,78762,640,77380,95645,95,93283,44194,511,80129,78762,679,72389,85728,64954,521,93963,72218,1,85288,95645,508,95311,119,64879,76204,59830,544,30033,74677,3,89199,25284]},awcbb_yhh_fun130:{variablePool:{awcbb_yhh_fun131:"awcbb_yhh_fun131"},zhili:[44194,670,12239,84405,30582,34933,80796,367,57404,83815,67404,545,19853,41646,218,52396,95645,587,563,82077,97059,546,61752,18922,516,57404,46212,27,10810,4541,18,68343,23185,516,74470,47300,27,53596,32545,670,72583,50046,18087,23185,516,57404,38959,27,77380,82077,32545,549,18922,550,61744,80796,95,9820,535,90466,64954,545,8934,22433,95311,544,29478,27370,5,57126,35812]},awcbb_yhh_fun131:{variablePool:{q:null},zhili:[39441,46212,669,81501,64147,97059,218,52050,46212,587,64277,157
74,669,52050,8080,67717]},awcbb_yhh_fun132:{variablePool:{awcbb_yhh_fun133:"awcbb_yhh_fun133",awcbb_yhh_fun134:"awcbb_yhh_fun134"},zhili:[74112,673,46212,675,78808,55173,71527,9103,9820,367,563,83815,58361,545,63531,58361,218,58752,58960,282,29478,75467,80796,546,18087,59830,516,563,41646,119,58752,94604,18,88198,95645,516,53955,62566,119,61752,80796,673,23098,78309,22433,64954,516,87026,43071,119,52050,60637,67404,549,64954,550,87847,6713,558,67170,32545,225,43193,3933,7823,559,591,68343,67404,560,3697,47409,2,72563,78733,40180,561,80129,18922,675,53955,11765,58960,521,71807,68185,1,61744,44194,508,95645,119,25022,43071,535,26972,80796,545,32736,47300,558,42991,64147,41646,544,50073,68185,5,224,53031]},awcbb_yhh_fun133:{variablePool:{q:null},zhili:[32517,57979,669,54997,33414,95311,218,563,97059,282,80129,23185,669,77380,78309,2029]},awcbb_yhh_fun134:{variablePool:{awcbb_yhh_fun135:"awcbb_yhh_fun135"},zhili:[32545,674,15348,89185,30711,84670,60605,658,58854,90191,64954,470,15774,590,96762,58960,562,88198,95645,224,52396,87847,60605,528,30506,40180,218,82317,74112,590,93963,80796,528,71658,4486,60605,556,11765,47300,218,10810,97059,590,74470,67404,563,29478,75467,62733,38959,511,44588,32545,674,58752,72623,43071,521,563,29585,1,44605,7823,508,18922,119,92215,74112,535,30582,97059,528,54884,58361,556,83848,63531,64529,544,72583,2732,5,224,36151]},awcbb_yhh_fun135:{variablePool:{},zhili:[39441,89185,5952,64277,32545,218,52396,41646,590,22745,38959,408,52980,9103,7823,519,29478,31162,1,97059,119,84670,47300,564,22541,89391,2,54884,2504]},awcbb_yhh_fun136:{variablePool:{awcbb_yhh_fun137:"awcbb_yhh_fun137"},zhili:[58361,678,2158,55173,96657,18087,7823,658,61091,41709,80796,48,64529,677,97453,46212,528,64277,62566,218,43193,9820,588,43193,58361,528,72389,4486,38959,556,34933,46212,218,61091,43071,588,10057,95645,563,10057,75467,47300,562,61752,60605,225,29478,44605,26653,549,6713,550,44298,15979,38959,511,88198,43071,678,72583,53596,44194,521,50073,41411,1,85171,43071,508
,18922,119,64879,40180,535,32881,47300,528,32736,32545,556,66595,85408,18922,544,29478,41411,5,8934,67717]},awcbb_yhh_fun137:{variablePool:{},zhili:[15348,67320,34228,80129,78762,218,30033,58960,588,30033,97059,408,74470,5497,43071,519,74470,74677,1,18922,119,22433,80796,564,72389,98239,2,32736,93555]},awcbb_yhh_fun138:{variablePool:{q:null},zhili:[64106,44194,669,26569,26653,682,64277,6713,274,61091,55207,1,27484]},awcbb_yhh_fun139:{variablePool:{},zhili:[80562,13021,5952,80796,684,44588,38959,564,71807,74677,1,98009,58735]},awcbb_yhh_fun140:{variablePool:{q:null},zhili:[3306,67404,669,84405,43071,695,68343,9820,274,563,29362,1,35812]},awcbb_yhh_fun141:{variablePool:{},zhili:[5169,48183,35896,58361,683,64153,78762,564,52050,5879,1,318,51512]},awcbb_yhh_fun142:{variablePool:{},zhili:[70676,34962,90466,9820,699,64153,46212,564,563,76020,1,57126,67717]},awcbb_yhh_fun143:{variablePool:{t:null,e:null,awcbb_yhh_fun144:"awcbb_yhh_fun144",awcbb_yhh_fun145:"awcbb_yhh_fun145"},zhili:[60605,710,64529,719,4062,15774,175,40365,54009,15774,710,22541,84670,25791,218,68343,64954,719,61091,79180]},awcbb_yhh_fun144:{variablePool:{},zhili:[83875,93730,64529,709,85408,26653,289,43193,23185,126,58752,98239,1]},awcbb_yhh_fun145:{variablePool:{n:null,o:null,a:null,i:null,awcbb_yhh_fun146:"awcbb_yhh_fun146"},zhili:[58960,718,4062,58361,247,80796,225,72238,11765,60605,504,82317,68343,51516,224,72623,64529,711,72583,8414,15795,135,67170,60605,508,3697,27118,0,7100,32545,512,33414,95645,708,29478,96657,2219,58960,135,87026,62733,64529,712,9820,78,64879,60605,713,40180,714,33352,68012,26653,715,11765,44194,718,71658,76204,38959,521,93963,72218,1,75467,9820,508,47300,119,96762,63531,40180,544,3697,59324,3,69374,18087,74112,526,23098,89391,3,36151]},awcbb_yhh_fun146:{variablePool:{awcbb_yhh_fun147:"awcbb_yhh_fun147"},zhili:[47300,717,70676,86572,67718,88198,38959,224,77380,13252,7823,48,67404,677,71606,47300,562,7651,78762,218,93963,22061,89401,97059,511,30506,64954,717,93963,76204,57979,521,10
810,69977,1,72563,44194,508,95311,119,87847,58511,38959,544,30033,940,3,66595,36151]},awcbb_yhh_fun147:{variablePool:{},zhili:[15348,34962,48503,59830,716,18087,95645,564,29478,940,1,15219,966]},awcbb_yhh_fun148:{variablePool:{t:null,e:null,awcbb_yhh_fun149:"awcbb_yhh_fun149",awcbb_yhh_fun150:"awcbb_yhh_fun150"},zhili:[46212,727,57979,766,5169,7823,175,72115,56027,58960,407,64529,30,92215,61752,40180,494,22541,63363,1,34933,8184,218,33414,58361,727,563,19853,95311,501,43193,74677,1,41632,64147,41646,766,23098,66353]},awcbb_yhh_fun149:{variablePool:{},zhili:[68328,96088,95311,639,53596,95645,218,57404,88198,64529,284,93963,95645,403,77380,55207,2]},awcbb_yhh_fun150:{variablePool:{n:null,o:null,a:null,i:null,d:null,f:null,p:null,b:null,awcbb_yhh_fun151:"awcbb_yhh_fun151"},zhili:[95311,765,5169,44194,247,67404,225,40365,54009,23185,504,22745,33414,43021,224,72623,59830,505,58752,30506,15795,135,64147,40180,661,87026,19853,28196,229,80129,78762,502,563,85728,74976,274,85408,74112,503,53955,9103,41837,277,67170,43071,506,72583,22433,49304,426,22433,67404,508,72583,55207,0,69301,30506,18922,426,71658,41709,7823,470,41646,728,85171,93283,44194,511,64277,47300,765,563,54009,74112,521,50073,63363,1,75467,64954,508,47300,119,44298,64153,23185,334,563,31162,3,38302]},awcbb_yhh_fun151:{variablePool:{awcbb_yhh_fun152:"awcbb_yhh_fun152",awcbb_yhh_fun153:"awcbb_yhh_fun153",awcbb_yhh_fun154:"awcbb_yhh_fun154",awcbb_yhh_fun157:"awcbb_yhh_fun157",awcbb_yhh_fun160:"awcbb_yhh_fun160"},zhili:[41646,731,32545,734,6713,737,23185,743,23185,764,55451,50825,30711,64147,58960,135,72583,26396,58361,545,64147,67404,247,58752,47300,729,82317,58960,89,23098,15774,730,99362,92215,40180,546,53596,6713,225,58854,64954,119,72583,85699,18,67170,43071,225,52396,23185,119,68343,26653,731,48530,78309,2219,47300,225,77380,46212,119,53955,71093,47300,548,58960,30,9206,78762,549,95645,550,24361,15774,551,57979,732,25022,57979,553,43071,554,34264,58361,555,9820,30,75467,26653,556,7651,47300,247,52050,7823,72
9,43193,41646,89,58854,58960,527,22745,95645,528,23098,71606,44194,557,7823,119,72563,6713,558,18087,60605,225,77380,64529,121,72583,78800,28,85408,40180,225,72389,15774,121,44588,95311,734,52050,71527,41646,559,57126,68343,23185,560,563,31162,2,8087,7651,59830,225,23098,26653,121,74470,35714,13252,32545,561,67170,41646,737,93963,8414,62566,521,53955,5879,1,75467,38959,508,26653,119,57481,38959,535,26972,58361,545,43150,95645,556,47670,7651,46212,544,72583,31787,5,8934,95311,27,44588,80796,247,53955,43071,729,53955,15774,89,10057,58361,738,82317,64529,133,22541,34213,78395,44,67170,95311,508,22541,63363,0,14239,33414,60605,277,57404,14266,95311,10,38959,119,97453,76029,40180,511,11765,47300,743,58752,18087,41646,521,74470,97127,1,22061,41646,508,41646,119,71093,67170,38959,334,53955,31787,3,29908,47,85728,41646,508,43193,27118,0,8902,8414,95645,277,22541,11451,64954,10,40180,27,85171,58361,744,62566,535,60637,90191,58361,511,84670,47300,764,72389,85728,32545,521,58752,7790,1,35714,58361,508,44194,119,16485,72623,15774,334,10057,47409,3,84051,9467]},awcbb_yhh_fun152:{variablePool:{r:null},zhili:[55451,95645,507,81501,9103,41646,247,53955,38959,729,48530,41646,89,77380,43071,730,58511,67404,507,23098,4623,27484]},awcbb_yhh_fun153:{variablePool:{r:null},zhili:[65870,58960,507,40365,95645,733,54009,74112,247,563,38959,729,52396,97059,293,74470,41411,1,5377]},awcbb_yhh_fun154:{variablePool:{awcbb_yhh_fun155:"awcbb_yhh_fun155",awcbb_yhh_fun156:"awcbb_yhh_fun156"},zhili:[38959,735,38959,736,55451,38549,30711,85408,64529,224,58854,15979,32545,562,85408,23185,225,72583,40180,27,23098,47229,18,19853,59830,225,72583,18922,27,64147,58960,735,10057,56020,7651,47300,225,52050,58960,27,52050,71093,60605,528,33414,38959,247,74470,32545,729,71807,95311,89,71658,6713,527,48530,58361,528,29478,96762,6713,556,30506,59830,247,99362,97059,729,52980,80796,89,72583,40180,527,48530,57979,563,61091,24361,83916,97059,511,58511,38959,736,48530,5497,67404,521,3697,89391,1,59137,46212,508,64954,
119,9206,6713,535,21715,38959,528,69374,97059,556,591,85408,60605,544,43193,7790,5,62205,45243]},awcbb_yhh_fun155:{variablePool:{r:null},zhili:[80562,44194,507,67320,9820,733,72623,6713,247,10810,59830,729,71658,95311,293,57404,31162,1,36151]},awcbb_yhh_fun156:{variablePool:{},zhili:[36174,23282,71527,11765,59830,247,52980,15774,729,563,80796,89,3697,78762,527,22541,74112,408,72389,30506,64954,519,48530,58767,1,62566,119,72623,46212,564,99362,58767,2,77,67717]},awcbb_yhh_fun157:{variablePool:{awcbb_yhh_fun158:"awcbb_yhh_fun158",awcbb_yhh_fun159:"awcbb_yhh_fun159"},zhili:[62566,741,38959,742,39441,86572,96657,64153,95311,247,48530,7823,729,53955,95311,89,50073,9820,527,72583,46212,528,93963,42624,54,64277,95311,508,99362,27118,0,8902,64277,67404,274,93963,83916,47300,10,58361,27,60637,7823,477,95645,305,60637,58960,470,47300,739,71606,81395,67404,511,63531,38959,741,3697,61752,43071,521,71658,47409,1,75467,60605,508,44194,119,21585,88198,43071,334,43193,58767,3,18564,62,67170,58960,508,71807,47409,0,70312,88198,26653,274,72389,76029,26653,10,97059,119,33352,47300,470,7823,530,34264,6713,531,97059,532,82077,43071,533,80796,415,25022,41646,534,9820,535,71093,56027,40180,511,85728,44194,742,23098,18087,38959,521,48530,27118,1,44298,15774,508,97059,119,75467,63531,62566,334,3697,91504,3,84214,966]},awcbb_yhh_fun158:{variablePool:{},zhili:[68328,40365,10151,18922,512,41709,6713,470,95311,740,25022,32545,486,85728,78762,218,82317,74112,407,71807,33352,44194,95,6713,535,88198,57979,722,23098,76204,18922,479,71658,29585,5,83848,70130]},awcbb_yhh_fun159:{variablePool:{},zhili:[2158,21418,5952,64529,512,54009,58361,723,22541,62247,7651,44194,724,563,8934,44194,477,18087,44194,725,52396,33414,95311,247,52396,43071,729,10810,44194,89,563,97059,536,58854,64529,537,52050,19853,74112,519,563,74677,1,62566,119,11765,59830,479,10057,940,4,57126,2219,18922,479,61091,41411,3,40640,64954,512,53596,74112,726,30033,33414,46212,247,10057,60605,729,93963,59830,89,77380,18922,538,23098,38959
,408,10810,64147,95311,519,82317,31787,1,64954,119,9103,15774,479,52050,2732,4,84214,25284]},awcbb_yhh_fun160:{variablePool:{awcbb_yhh_fun161:"awcbb_yhh_fun161",awcbb_yhh_fun163:"awcbb_yhh_fun163"},zhili:[38959,757,64529,763,83875,9832,35896,97059,27,44380,61752,40180,508,52050,45100,1,33457,9103,23185,513,43193,80796,95,33414,7823,247,22745,46212,729,53955,40180,89,87026,40180,738,22541,64277,41646,757,77380,64153,18922,524,87026,29585,2,18922,525,8414,58960,526,82317,29362,4,17775,8414,41646,274,48530,82265,43071,531,46212,305,96762,41646,470,26653,758,57481,78733,44194,511,2219,7823,763,57404,67170,6713,521,29478,91504,1,59137,97059,508,26653,119,44298,53596,43071,544,22541,940,3,46792,79180]},awcbb_yhh_fun161:{variablePool:{r:null,c:null,awcbb_yhh_fun162:"awcbb_yhh_fun162"},zhili:[41646,756,83875,7823,507,57979,516,80124,5497,26653,508,82317,27118,0,45196,64153,67404,274,77380,68012,60605,531,64954,745,44298,38959,533,64529,745,59137,58960,534,58960,535,82077,41646,746,95645,747,71093,59830,748,57979,314,97453,59830,10,88198,6713,507,58854,80796,749,82317,97453,64954,470,32545,686,87847,1443,41646,511,64277,46212,756,50073,30506,60605,521,10810,59324,1,99379,32545,508,78762,121,99379,9820,366,64153,6713,334,22541,91090,4,99221]},awcbb_yhh_fun162:{variablePool:{},zhili:[64106,9832,21715,19853,64529,229,74470,13252,43071,687,64147,46212,507,77380,80796,750,99362,35714,26653,751,95311,752,85171,9820,753,64147,78762,247,10810,38959,729,61091,97059,89,23098,74112,754,58854,96762,41646,755,22433,67404,516,43193,33352,78762,95,18922,535,62247,26653,687,8934,7823,753,224,38959,755,77548,44588,95311,544,52396,89391,5,46792,93777]},awcbb_yhh_fun163:{variablePool:{awcbb_yhh_fun164:"awcbb_yhh_fun164",awcbb_yhh_fun165:"awcbb_yhh_fun165"},zhili:[47300,760,95645,762,3306,23282,67057,9103,64529,224,99362,56027,43071,562,30506,64954,225,72389,62566,124,72583,91422,18,22433,46212,225,99362,15774,124,84670,40180,760,10810,85445,61752,41646,225,10810,41646,124,563,44298,97059,698,3
8959,30,24361,59557,44194,511,2219,41646,762,57404,7651,18922,521,74470,41411,1,33352,9820,508,60605,119,72563,84670,80796,544,29478,55207,3,57126,51512]},awcbb_yhh_fun164:{variablePool:{r:null},zhili:[12239,47300,507,41474,32545,759,85408,64954,247,53955,67404,729,50073,40180,267,3697,47409,1,9467]},awcbb_yhh_fun165:{variablePool:{},zhili:[17105,67320,41421,7823,761,85728,95311,564,58854,76020,1,71341,35812]},awcbb_yhh_fun166:{variablePool:{t:null,e:null,awcbb_yhh_fun167:"awcbb_yhh_fun167",awcbb_yhh_fun168:"awcbb_yhh_fun168"},zhili:[32545,777,60605,793,12239,57979,175,72115,11451,59830,407,95645,30,35714,9103,80796,494,87026,27370,1,80129,8184,218,9103,95311,777,563,61752,40180,501,52396,63363,1,72601,64277,58361,793,53955,21204]},awcbb_yhh_fun167:{variablePool:{},zhili:[4062,67320,18922,776,80129,9820,218,22745,64153,44194,284,99362,57979,403,48530,97127,2]},awcbb_yhh_fun168:{variablePool:{n:null,o:null,a:null,i:null,d:null,f:null,p:null,awcbb_yhh_fun169:"awcbb_yhh_fun169"},zhili:[95645,792,78808,7823,247,62566,225,50825,33414,67404,504,82317,7651,99730,224,67170,74112,505,53955,64147,79263,135,85408,78762,502,52050,64147,41837,229,53596,26653,503,30033,19853,75421,274,72623,6713,506,23098,80129,56565,277,22433,59830,508,74470,72218,0,41632,68343,41646,277,52396,59557,80796,470,64954,728,85171,90191,9820,511,88198,26653,792,99362,68343,47300,521,22541,47409,1,71606,58361,508,60605,119,97453,22433,74112,334,52396,5879,3,9467]},awcbb_yhh_fun169:{variablePool:{awcbb_yhh_fun170:"awcbb_yhh_fun170",awcbb_yhh_fun171:"awcbb_yhh_fun171",awcbb_yhh_fun172:"awcbb_yhh_fun172",awcbb_yhh_fun175:"awcbb_yhh_fun175",awcbb_yhh_fun178:"awcbb_yhh_fun178"},zhili:[95645,779,26653,781,23185,784,74112,788,26653,791,60433,4004,41421,8414,74112,135,53955,36422,64954,545,72623,80796,247,29478,7823,729,53955,43071,89,71807,58960,778,30033,16485,6713,546,8414,9820,225,22541,38959,119,61091,4541,18,64277,9820,225,50073,43071,119,80129,9820,779,23098,78309,5497,41646,225,22745,44194,119,10057,44
605,38959,548,78762,30,97453,97059,549,62566,550,64879,46212,551,64954,780,25022,38959,553,74112,554,61744,78762,555,9820,30,33352,80796,556,44588,40180,247,93963,62566,729,50073,47300,89,61091,64954,527,52396,41646,528,71658,24361,44194,557,64529,119,96762,60605,558,58511,58361,225,58752,78762,121,71807,80250,28,44588,9820,225,87026,18922,121,5497,7823,781,72389,61241,57979,559,66595,19853,40180,560,3697,47409,2,85445,84670,7823,225,74470,23185,121,71807,71606,68012,57979,561,80129,78762,784,10057,85408,6713,521,74470,45100,1,57481,64529,508,47300,119,75467,9820,535,26972,46212,545,46782,7823,556,83848,64147,44194,544,52050,29362,5,46782,97059,27,72623,9820,247,93963,18922,729,50073,32545,89,10057,40180,785,77380,62566,133,52396,74651,35828,44,22433,78762,508,48530,41411,0,75041,72623,95311,274,10810,81395,44194,10,9820,119,85288,93283,62566,511,33414,80796,788,52396,85408,47300,521,58752,76020,1,34264,18922,508,40180,119,75467,63531,74112,334,52396,68185,3,18564,47,61752,38959,508,77380,29362,0,43823,2219,78762,274,22745,90464,43071,10,38959,27,4486,26653,744,64529,535,97453,83916,60605,511,30506,80796,791,61091,8414,6713,521,563,89391,1,22061,7823,508,26653,119,64879,5497,43071,334,29478,55207,3,40640,93555]},awcbb_yhh_fun170:{variablePool:{b:null},zhili:[2158,58960,426,26569,84670,95645,247,77380,6713,729,52980,47300,89,10810,18922,778,2219,57979,426,48530,8087,88999]},awcbb_yhh_fun171:{variablePool:{b:null},zhili:[36174,32545,426,81501,26653,496,23185,776,54009,15774,247,10810,95311,729,52050,58361,293,57404,91090,2,79180]},awcbb_yhh_fun172:{variablePool:{awcbb_yhh_fun173:"awcbb_yhh_fun173",awcbb_yhh_fun174:"awcbb_yhh_fun174"},zhili:[18922,782,64529,783,70676,26431,62247,44588,43071,224,57404,13252,60605,562,63531,59830,225,48530,80796,27,23098,10826,18,11765,43071,225,10057,9820,27,5497,18922,782,71807,88857,53596,23185,225,52980,80796,27,29478,87847,58960,528,84670,67404,247,52396,7823,729,29478,78762,89,82317,78762,527,71807,9820,528,563,75467,74112,556,8414
,67404,247,10810,97059,729,29478,7823,89,99362,80796,527,52980,32545,563,99362,25022,26396,47300,511,64277,44194,783,82317,76204,41646,521,72583,29362,1,82077,58960,508,95311,119,96762,62566,535,67718,95311,528,47670,15774,556,71341,64277,64529,544,3697,27370,5,89199,93555]},awcbb_yhh_fun173:{variablePool:{b:null},zhili:[64106,95645,426,33529,95311,496,60605,776,8414,46212,247,72389,59830,729,10057,46212,293,58854,72218,2,36151]},awcbb_yhh_fun174:{variablePool:{},zhili:[64106,48183,86453,5497,95645,247,30033,23185,729,3697,9820,89,52050,59830,527,87026,97059,408,50073,19853,6713,519,43193,27370,1,32545,119,85408,47300,564,52396,68185,2,40640,53031]},awcbb_yhh_fun175:{variablePool:{awcbb_yhh_fun176:"awcbb_yhh_fun176",awcbb_yhh_fun177:"awcbb_yhh_fun177"},zhili:[64529,786,62566,787,55451,27863,30711,53596,7823,247,72389,44194,729,10810,6713,89,10057,44194,527,29478,59830,528,71807,73109,54,30506,64954,508,22541,91090,0,45196,63531,80796,229,23098,11451,64954,10,38959,27,60637,40180,477,7823,305,87847,32545,470,23185,739,4486,62733,57979,511,67170,40180,786,61091,11765,57979,521,563,47409,1,44605,80796,508,9820,119,72563,9103,6713,334,43193,98239,3,63144,62,64277,44194,508,23098,52058,0,72601,9103,38959,229,87026,78733,64954,10,43071,119,64879,44194,470,9820,530,71606,15774,531,97059,532,71093,44194,533,80796,415,92215,80796,534,64954,535,71093,62733,7823,511,9103,95311,787,23098,63531,43071,521,52980,72218,1,85171,59830,508,23185,119,85288,64153,59830,334,77380,27118,3,84051,2029]},awcbb_yhh_fun176:{variablePool:{},zhili:[26779,27863,71527,60605,512,76035,67404,470,26653,740,99379,32545,486,64277,9820,218,61091,9820,407,10057,22061,6713,95,18922,535,64277,60605,770,10810,54009,46212,479,61091,69977,5,77548,41373]},awcbb_yhh_fun177:{variablePool:{},zhili:[94285,54997,67718,38959,512,8414,6713,771,77380,10151,88198,58361,772,563,15219,78762,477,5497,80796,773,61091,18087,64954,247,43193,18922,729,57404,38959,89,52396,40180,536,52396,26653,537,77380,5497,41646,519,77380,5
2058,1,32545,119,76204,64529,479,72389,41411,4,98009,84670,64529,479,43193,52058,3,98009,23185,512,61752,58361,775,10057,67170,9820,247,72389,46212,729,50073,95311,89,72583,60605,538,23098,23185,408,3697,88198,64529,519,10057,31787,1,60605,119,5497,43071,479,52050,74677,4,17775,82306]},awcbb_yhh_fun178:{variablePool:{awcbb_yhh_fun179:"awcbb_yhh_fun179"},zhili:[95311,790,87017,41474,99151,8414,58361,229,43193,68012,18922,531,59830,305,61744,60605,470,67404,789,99379,63903,64954,511,72623,95311,790,50073,9103,97059,521,58752,45100,1,99379,15774,508,43071,119,22061,8414,9820,544,43193,47409,3,591,74054]},awcbb_yhh_fun179:{variablePool:{},zhili:[66932,84405,34228,64529,512,29172,59830,486,5497,95311,247,22745,41646,729,61091,64954,89,52980,97059,785,29478,71093,78762,95,95645,535,2219,18922,769,72583,58511,80796,479,10057,55207,5,89199,70130]},awcbb_yhh_fun180:{variablePool:{t:null,e:null,n:null},zhili:[68149,15774,175,78762,218,64954,247,89185,5497,9820,175,77380,64954,798,48530,47300,799,10810,70299,8,76204,41646,247,3697,69977,0,77765,29,15774,283,2219,97059,444,72583,43071,445,29478,41411,1,42624,10,47300,288,85728,23185,247,52980,31162,1,18564,6,76204,23185,247,53955,31787,0]},awcbb_yhh_fun181:{variablePool:{t:null,e:null,n:null,awcbb_yhh_fun182:"awcbb_yhh_fun182"},zhili:[74112,838,68328,95645,175,33529,18087,95311,838,58854,88198,62566,839,52050,53596,74112,0,52050,52058,0,47300,52,52396,47409,2,93555]},awcbb_yhh_fun182:{variablePool:{_context:null},zhili:[39441,9820,818,86572,60605,119,85085,322,19853,7823,818,77380,18922,172,7651,32545,818,72389,59830,65,23098,25655,11765,7823,818,52980,43071,172,10057,60605,27,40180,124,57979,185,7823,835,62566,27,32545,836,6713,836,32545,837,39027,4,9820,283,80129,26653,284,82317,38959,285,50073,6713,349,43193,41411,1,519,34,44588,6713,175,87026,15774,819,77380,59830,820,7823,283,84670,7823,284,77380,6713,285,563,67404,349,10057,95012,1,11125,30506,97059,175,22541,40180,819,53955,95645,820,87026,60605,348,67170,41646,284,3697,
95645,285,22745,57979,349,53955,97127,1,92588,34,64277,58960,175,53955,95311,819,563,43071,821,60605,348,33414,26653,284,30033,41646,285,72389,78762,349,61091,45100,1,5659,72623,9820,175,48530,64954,819,58854,40180,821,52980,38959,610,61752,95645,175,10810,40180,62,50073,61479,25219,14,7651,40180,175,29478,78762,819,48530,58361,822,26653,823,85756,13780,52,26653,824,22433,18922,175,72583,58361,819,77380,9820,822,82317,54054,75288,37,9103,59830,175,43193,64954,819,71658,74112,822,97059,825,38354,61752,9820,175,29478,7823,292,68343,60605,175,29478,78762,292,77380,34933,95645,826,10810,6713,448,58752,47409,1,11125,43071,95,59830,95,9103,38959,218,64954,827,52431,84670,74112,247,64153,67404,828,10057,1390,0,30381,6713,95,18922,66,63531,41646,218,48530,84670,23185,247,77380,64529,829,3697,63363,1,84581,7651,40180,175,52980,18922,819,22745,7823,830,60605,831,80837,18087,23185,175,52980,18922,819,99362,47300,832,85408,67404,284,10057,95645,405,3697,98239,0,7823,833,83813,64153,64529,247,563,60605,834,71807,72218,1,85756,88198,41646,175,23098,53596,67404,818,52396,58960,100,10810,69977,2,27484,67170,95311,818,22745,32545,178,58854,63363,0,2029,18631,-326]},awcbb_yhh_fun183:{variablePool:{t:null},zhili:[80562,46212,175,26431,95311,842,30506,57979,634,58752,15774,843,72389,72218,1,60380,85408,67404,175,74470,88198,47300,155,58854,43071,70,82317,5879,1,88999]},awcbb_yhh_fun184:{variablePool:{t:null,e:null,awcbb_yhh_fun185:"awcbb_yhh_fun185",awcbb_yhh_fun186:"awcbb_yhh_fun186",awcbb_yhh_fun187:"awcbb_yhh_fun187",awcbb_yhh_fun188:"awcbb_yhh_fun188",awcbb_yhh_fun189:"awcbb_yhh_fun189",awcbb_yhh_fun190:"awcbb_yhh_fun190"},zhili:[15774,859,60605,869,74112,870,38959,872,7823,873,38959,875,68149,74112,175,13021,60605,848,80129,15774,175,52396,40180,849,57404,97059,62,77380,99621,51849,2,47329,5,11765,26653,175,3697,2504,23185,291,44588,9820,175,30033,6713,850,99362,3414,11078,19,18087,62566,175,563,44194,292,71658,64147,38959,155,57404,64529,70,10057,63363,1,74054,73062,543,97059,291
,33414,64954,175,61091,18922,292,10810,59830,282,93963,37867,22695,2,55513,8,30506,38959,175,61091,6713,292,72583,21204,5952,58960,281,62205,57979,851,89199,43071,852,17775,59830,853,66595,9820,854,224,9820,855,69374,46212,856,57126,63531,93083,218,18922,857,95645,858,34933,59830,284,82317,74112,285,99362,62566,349,52980,69977,1,9228,4411,457,74112,858,78762,857,63531,95311,284,52396,58960,285,72583,58960,352,72389,95012,2,33457,85728,38959,859,93963,57979,307,64147,43071,860,22541,31162,2,70312,9820,119,29,54009,38959,175,43193,80796,292,58752,9820,282,29478,84670,47300,218,74470,74112,342,52050,31787,1,78977,22695,86,9820,861,64153,67404,175,58854,58361,292,74470,97059,282,43193,45973,96274,21,61752,95311,175,23098,58361,292,10810,80796,294,57404,8414,44194,634,61091,60605,843,43193,98239,1,13780,48,88198,58361,284,563,15774,285,50073,97059,286,71807,72218,0,15276,40180,288,5497,40180,289,52980,9820,126,23098,63363,1,94831,76204,41646,175,22541,59830,292,29478,95645,294,43193,1853,2,78762,862,2219,47300,634,99362,57979,843,72389,74677,1,45016,314,26653,281,53596,74112,175,74470,58960,292,23098,6713,282,61091,7304,11078,263,78762,853,88198,26653,175,43193,47300,292,58752,47300,282,43193,7304,31824,203,41646,854,9103,80796,175,52980,78762,292,30033,38959,282,43193,18852,73069,143,64529,855,11765,41646,175,22541,60605,292,58752,74112,282,563,7304,70299,84,58361,856,44588,62566,175,57404,26653,292,22541,95645,282,53955,14731,51849,25,84670,38959,175,52396,23185,292,57404,46212,294,74470,43341,2,95645,862,18087,80796,634,52396,46212,843,58854,5879,1,37455,42,85408,78762,175,71658,46212,292,82317,78762,294,77380,94604,2,62566,863,89401,58361,630,41646,631,99379,62566,864,62566,119,71268,61744,6713,865,61752,23185,869,30033,44298,88198,26653,634,71658,40180,843,29478,97127,2,18631,42,61752,59830,175,57404,64954,292,50073,80796,294,57404,85699,2,6713,862,83916,64529,630,64954,631,71606,74112,864,32545,119,39732,87847,26653,865,72623,64529,870,10057,21585,85728,62566,634,7
4470,74112,843,563,58767,2,60763,43,18087,58960,872,53955,85728,40180,175,29478,40180,292,74470,43071,294,57404,16816,2,23185,862,93283,6713,630,80796,631,96762,38959,632,97059,633,33352,61752,62566,634,23098,7823,635,52396,52058,2,64954,199,57404,27118,1,60038,43,11765,41646,873,52050,53596,64954,175,77380,67404,292,10057,46212,294,87026,35096,2,74112,862,1443,59830,630,15774,631,4486,38959,632,78762,633,99379,9103,74112,634,58752,9820,635,74470,58767,2,41646,81,43193,68185,1,73062,36,7651,57979,175,10810,64529,292,43193,58361,294,82317,10667,2,47300,874,13252,95645,630,95311,631,44298,58361,865,85408,43071,875,52980,61744,53596,95311,634,23098,95311,843,72583,74677,2,58361,95,63531,95645,175,71658,62566,292,50073,44588,9820,155,50073,26653,70,563,95012,1,45243]},awcbb_yhh_fun185:{variablePool:{},zhili:[68328,71340,46212,858,7651,95645,284,57404,62566,285,72389,58960,286,87026,27118,1]},awcbb_yhh_fun186:{variablePool:{},zhili:[36174,41474,8414,9820,866,48530,46212,867,58752,78762,868,10057,2732,0]},awcbb_yhh_fun187:{variablePool:{},zhili:[96628,81501,64277,9820,175,52980,6713,292,87026,64954,73,93963,38959,796,10057,27998,22,76204,15774,175,23098,18922,292,58752,15774,73,10810,47300,796,72583,58511,95645,289,71658,47300,126,72583,91504,1]},awcbb_yhh_fun188:{variablePool:{},zhili:[17105,26569,58960,119,71151,18087,18922,289,72583,58361,871,87026,5879,1]},awcbb_yhh_fun189:{variablePool:{},zhili:[87017,23282,64529,119,30382,53596,23185,289,29478,43071,871,23098,940,1]},awcbb_yhh_fun190:{variablePool:{},zhili:[2158,80124,97059,283,68343,80796,284,74470,60605,285,29478,95311,286,61091,72218,1,75041,57979,595,54009,60605,284,71807,58361,285,30033,23185,286,48530,68185,1,33457,78762,288,72623,60605,289,52050,97059,126,22541,27118,1]},awcbb_yhh_fun191:{variablePool:{t:null},zhili:[64106,80796,175,27863,33414,6713,175,99362,43071,877,50073,60605,292,93963,41646,294,99362,61752,67404,634,53955,38959,843,30033,29362,1,14239,22433,18922,175,77380,97059,877,52980,67404,292,3697
,88198,97059,155,22541,59830,70,53955,940,1,2504]},awcbb_yhh_fun192:{variablePool:{t:null,e:null,n:null,awcbb_yhh_fun193:"awcbb_yhh_fun193",awcbb_yhh_fun195:"awcbb_yhh_fun195",awcbb_yhh_fun197:"awcbb_yhh_fun197"},zhili:[43071,888,44194,893,97059,905,33338,43071,175,4004,9103,7823,493,58854,940,0,34933,65802,218,44588,58361,888,23098,33414,79263,247,44588,80796,893,563,7651,43071,501,58854,47409,1,75041,64277,57979,905,61091,38302]},awcbb_yhh_fun193:{variablePool:{o:null,a:null,awcbb_yhh_fun194:"awcbb_yhh_fun194"},zhili:[64954,884,32517,97059,225,93730,34933,44194,884,71807,85728,64529,218,87026,38959,89,77380,74112,885,93963,38959,886,53955,76020,1,5497,49304,224,88198,57979,224,72389,59830,27,58854,60605,887,30033,23671,35,43071,601,85408,59830,224,74470,78762,27,72583,9820,749,29478,5497,74112,218,74470,46212,267,10057,31162,2,75284,33414,38959,225,74470,85408,95645,289,57404,58361,126,72583,41411,1]},awcbb_yhh_fun194:{variablePool:{i:null},zhili:[66932,26653,135,84405,68343,59830,225,71807,64153,44194,135,30033,46212,749,82317,23179,19567]},awcbb_yhh_fun195:{variablePool:{o:null,awcbb_yhh_fun196:"awcbb_yhh_fun196"},zhili:[38959,892,26472,96088,11765,44194,866,77380,44194,867,22541,74112,889,3697,85728,75421,225,19853,9820,225,26653,119,8414,64529,225,29478,58361,890,72389,89391,1,27557,95311,601,67170,32545,225,52396,8414,15774,218,29478,9820,267,93963,31162,2,75284,76204,23185,892,23098,72623,60605,284,71807,67404,397,10810,45100,1,41632,33414,58361,218,72389,34933,58361,284,71807,64529,347,3697,45100,1]},awcbb_yhh_fun196:{variablePool:{a:null},zhili:[3749,15774,224,55173,9820,891,54009,47300,224,87026,98535,80958,13,72623,60605,218,22745,85728,64529,284,58752,58361,347,10057,5879,1]},awcbb_yhh_fun197:{variablePool:{o:null,a:null,i:null,d:null,f:null,awcbb_yhh_fun198:"awcbb_yhh_fun198"},zhili:[78762,904,3306,62566,225,95311,224,26431,44588,18922,504,22745,68343,25791,135,38959,894,85408,58361,895,10057,7790,1,85728,15795,229,7651,23185,896,58854,67170,33043,274,
84670,32545,508,87026,91504,0,75284,26653,512,8414,64529,880,72583,58440,19853,7823,274,82317,62733,46212,509,41709,41646,897,6713,898,97453,9206,40180,470,57979,899,97453,29172,47300,511,53596,6713,904,77380,88198,15774,521,71658,45100,1,99379,7823,508,58960,119,82077,85408,59830,544,93963,95012,3,17775,11765,26653,526,52396,68185,3,45243]},awcbb_yhh_fun198:{variablePool:{awcbb_yhh_fun199:"awcbb_yhh_fun199"},zhili:[18922,903,80562,23282,34228,7823,512,64147,57979,882,82317,93754,67404,27,77519,53596,7823,508,10057,27118,1,45196,64153,67404,513,71658,62566,95,84670,58960,218,77380,64147,80796,514,93963,2732,1,43071,89,22541,38959,885,71807,64153,6713,903,52980,2219,18922,524,3697,63363,2,74112,525,61752,6713,526,53955,2732,4,66595,7651,95311,479,22745,2732,3,89199,8414,40180,229,3697,61752,41646,544,23098,90610,1,77548,67717]},awcbb_yhh_fun199:{variablePool:{p:null,awcbb_yhh_fun200:"awcbb_yhh_fun200",awcbb_yhh_fun201:"awcbb_yhh_fun201"},zhili:[57979,901,80796,902,4062,67404,277,67320,68343,67404,508,82317,58767,0,45196,18087,95645,135,10810,81395,80796,470,38959,900,75467,15774,698,18087,18922,277,29478,26653,887,53955,59137,23185,10,68343,64954,277,563,41646,749,30033,33352,62566,562,72623,59830,901,22745,22061,11451,6713,511,85408,26653,902,71807,19853,15774,521,52980,31787,1,64879,38959,508,41646,121,35714,95311,522,71527,58361,698,77548,59830,562,83848,54009,38959,334,23098,76020,5,21204]},awcbb_yhh_fun200:{variablePool:{b:null},zhili:[26779,80796,426,34962,85728,38959,277,77380,95645,749,23098,44588,40180,247,53955,58767,1,36151]},awcbb_yhh_fun201:{variablePool:{},zhili:[64106,89185,30711,80129,9820,277,3697,97059,143,30033,58511,9820,519,23098,76020,1,6713,119,58511,6713,564,74470,940,2,17775,5377]},awcbb_yhh_fun202:{variablePool:{t:null,e:null},zhili:[26472,97059,175,15774,218,21418,64153,18922,218,58854,26653,175,94867,66706,10,46212,776,64153,60605,218,3697,80796,175,52396,61681,24642,46,33414,41646,218,10057,80796,175,9454,38269,7,2219,95645,218,74470,7823
,175,54253,80129,32545,218,93963,68343,58960,175,99362,95645,515,53955,40180,126,58854,95012,1,15276,76204,47300,175,58752,18922,495,58960,30,27557,67404,95,37455,14,22433,18922,175,23098,58960,785,19853,64529,218,52980,7823,294,72389,27557]},awcbb_yhh_fun203:{variablePool:{t:null,e:null},zhili:[26779,74112,175,58361,218,71340,68343,15774,175,22541,46212,785,64153,40180,218,53955,38354]},awcbb_yhh_fun204:{variablePool:{t:null,e:null},zhili:[26779,80796,175,95311,218,33529,61752,95311,175,563,38959,253,88198,62566,218,58854,85445]},awcbb_yhh_fun205:{variablePool:{t:null},zhili:[68149,58960,175,93730,67170,78762,175,52980,97059,515,30033,46212,133,47300,27,38354]},awcbb_yhh_fun206:{variablePool:{t:null,e:null},zhili:[65870,38959,175,60605,218,72238,34933,80796,175,563,97059,738,54009,60605,218,93963,66095]},awcbb_yhh_fun207:{variablePool:{t:null,e:null},zhili:[78808,47300,175,95645,218,23282,53596,7823,175,72389,60605,754,30506,64954,218,23098,5659]},awcbb_yhh_fun208:{variablePool:{t:null,e:null},zhili:[66887,32545,175,41646,218,89185,9103,57979,175,87026,68343,95311,218,53955,52396,97059,528,97059,27,64650,27557,54009,64529,175,48530,54009,59830,218,52396,563,67404,408,7823,612,28050,54009,15774,175,23098,30506,47300,218,93963,93963,23185,563,58960,27,77519,56020,2219,43071,175,82317,46212,536,52050,41646,593,95645,95,87350,22433,97059,175,52980,58361,538,77380,64954,593,97059,95,93438,63531,9820,175,72583,6713,536,61091,60605,537,62566,527,5497,80796,218,563,66888,50472,4,80796,936,45016,2,67404,918,78309,58361,95]},awcbb_yhh_fun209:{variablePool:{t:null,e:null,n:null},zhili:[17105,43071,175,95645,218,38549,88198,44194,175,87026,6713,885,82317,60242,97059,27,63531,65921,165,9103,80796,165,3697,63531,64954,166,71658,43071,133,87026,39404,1295,78,64153,58960,247,80129,78762,166,53955,30506,57979,165,72389,71658,20350,64277,6713,218,563,76204,7823,175,43193,95645,885,30033,44588,15774,247,10057,29478,15774,749,30033,91103,54335,20,76204,38959,175,61091,38959,885,10057,
84670,64529,247,82317,82317,38959,887,58960,27,44380,80837,77765,18,5497,7823,175,23098,62566,885,22745,8414,57979,247,29478,71658,64529,887,15774,119,30732,38354,7651,90908,165,52189,-92]},awcbb_yhh_fun210:{variablePool:{t:null},zhili:[80562,26653,175,54997,54009,57979,175,22745,7823,527,43193,78762,528,7823,119,5886,27781,34933,64954,175,29478,95645,527,58854,95311,408,58960,910,27781,84670,32545,175,99362,38959,527,43193,74112,563,15774,119,91582,27557,11765,7823,175,52050,80796,536,71658,80796,593,72583,17054,43,84670,18922,175,52980,41646,536,74470,38959,593,61091,9103,23185,614,52396,29585,1,8902,72623,80796,175,52050,80796,536,43193,47300,917,15774,27,80611,5497,60605,175,82317,58960,536,77380,44194,919,41646,27,11748,23185,95,44588,23185,175,50073,47300,536,29478,18922,537,32545,30,66095,33414,26653,175,10057,74112,538,87026,38959,593,52396,89484,43,67170,26653,175,50073,38959,538,57404,97059,593,43193,44588,58361,614,52050,72218,1,8902,22433,6713,175,58752,64529,538,99362,15774,594,58361,27,5659,64277,59830,175,30033,43071,538,29478,95645,408,62566,30,66095,58361,95,95645,95]},awcbb_yhh_fun211:{variablePool:{t:null},zhili:[15348,57979,175,72238,34933,47300,175,87026,58361,738,57404,47300,133,74112,27,52431,61752,78762,175,29478,62566,754,52050,67404,133,58960,27,11125,44194,95]},awcbb_yhh_fun212:{variablePool:{t:null,e:null,awcbb_yhh_fun213:"awcbb_yhh_fun213"},zhili:[80796,961,26779,62566,175,97059,218,23282,85408,64529,961,58854,54009,95311,0,87026,27370,0,74112,145,52396,52058,1,19853,15774,841,52396,89391,1,31787,0,25284]},awcbb_yhh_fun213:{variablePool:{n:null,o:null,i:null,_i:null,awcbb_yhh_fun214:"awcbb_yhh_fun214"},zhili:[97059,959,87017,38549,64277,46212,959,71658,5497,44194,960,52050,59830,95,48503,39834,64954,425,77,38959,532,84214,32736,85408,44194,0,52980,29585,0,7823,52,22541,59324,4,2029]},awcbb_yhh_fun214:{variablePool:{_context2:null,awcbb_yhh_fun215:"awcbb_yhh_fun215"},zhili:[64954,947,5169,97059,942,86572,60605,119,70788,640,19853,62566,94
2,93963,78762,172,33414,58361,942,30033,60605,65,57404,27557,34933,47300,942,30033,15774,172,29478,62566,27,46212,124,18922,535,80796,415,59830,532,59830,944,6713,185,41646,835,80796,27,43071,954,97059,291,46212,955,95645,956,59830,957,64529,957,7823,958,76321,8,11765,80796,175,87026,43071,89,23098,9820,253,71807,30732,78e3,14,5497,95645,175,23098,74112,89,10057,47300,253,23098,59830,254,58854,12569,76,11078,2,52878,11,88198,44194,942,50073,32545,65,58361,124,25655,47329,540,43071,943,2219,64529,634,93963,40180,843,58752,41411,1,95311,66,5497,46212,942,93963,43071,100,58854,69977,1,45607,57979,497,5497,44194,218,10810,98092,32275,16,43071,27,18087,6713,175,58752,18922,89,72389,64954,495,563,97059,133,29478,84774,5185,33970,26,95311,776,85408,43071,218,29478,24607,45295,16,26653,27,44588,64529,175,71658,59830,89,22745,40180,778,77380,64954,133,3697,59,77519,5428,88345,2,29908,11,85408,32545,942,72389,64954,65,64954,944,66095,47329,447,40180,761,64277,43071,175,57404,95311,89,58752,46212,495,77380,7716,17054,7,38959,497,84670,15774,218,22745,3414,92021,11383,2,18631,11,58511,9820,942,10810,32545,65,7823,535,85445,64476,409,30506,64954,175,30033,64954,89,52396,74112,515,23098,64529,133,47300,27,53012,64147,64954,942,57404,95311,65,47300,944,11125,77765,383,34933,78762,247,26653,30,7286,64954,622,59830,527,72623,64954,175,71658,26653,267,10057,76020,2,94831,26653,945,63531,80796,175,52050,43071,293,93963,98239,1,5497,44194,225,74112,30,4623,59830,497,58511,47300,218,61091,16623,78395,46,60605,927,23185,30,85728,62566,175,99362,43071,267,43193,95012,2,9196,64147,58361,225,84670,18922,175,563,78762,89,61091,32545,778,22745,85756,85408,80796,247,8414,64954,175,52050,74112,89,52396,32545,778,52980,52431,74112,95,55098,82,63531,78762,135,67404,124,99128,2219,59830,175,93963,6713,89,72583,95311,515,72389,43071,176,30033,91504,1,8080,95645,27,19853,46212,135,93963,58361,133,50073,85089,4411,13,33414,97059,947,52980,58511,32545,135,29478,44194,440,52980,29362,1,61752,18922,247,
11765,7823,175,58854,95645,89,3697,6713,495,58854,7286,53596,95645,225,53596,43071,175,58854,7823,89,99362,23185,495,71658,72623,67404,225,61091,43314,10414,74112,95,63531,67404,225,5497,46212,225,71807,9820,948,22745,41411,0,52431,58361,949,7823,27,23185,119,72623,41646,225,52980,60605,418,87026,31162,2,14731,21450,15,11765,26653,225,11765,64529,225,71658,64954,337,53501,5659,9103,32545,225,52980,64529,95,85408,44194,942,48530,80796,172,15774,425,87350,11765,95311,942,52980,95645,65,97059,415,38354,64954,950,1443,9820,408,33414,95311,225,71807,24361,32545,48,54009,40180,218,52050,97453,57979,951,72623,67404,247,82317,34264,63531,18922,355,71807,7823,610,77380,63363,2,38302,19853,78762,952,8414,58960,942,563,60605,102,58752,52821,44194,776,5497,38959,218,58854,39567,25595,30,26653,925,36422,64529,294,85728,23185,175,52396,7823,89,58854,58361,495,72389,44605,7823,48,78762,36,61744,19853,26653,175,52396,74112,267,53955,95012,2,33414,74112,942,99362,18922,65,62566,944,38232,73062,48,85728,23185,942,10810,58960,172,6713,532,8080,61752,67404,942,50073,62566,953,67404,425,64277,32545,942,30033,23185,199,57404,63363,1,80611,44194,298,88198,74112,175,52396,43071,267,10810,31162,1,67170,58960,942,71658,97059,178,77380,68185,0,67717,37455,-644]},awcbb_yhh_fun215:{variablePool:{d:null},zhili:[64106,40180,229,9832,18087,64954,225,22433,97059,229,87026,64529,294,52050,38959,946,86379,2219,62566,225,3697,9307,55019]},awcbb_yhh_fun216:{variablePool:{t:null,awcbb_yhh_fun217:"awcbb_yhh_fun217"},zhili:[97059,963,66887,67404,175,72238,78762,27,68343,26653,175,10057,44194,89,52980,26653,730,77380,97059,133,23098,39903,8578,88345,2,86650,130,64277,64954,175,50073,80796,89,43193,62566,253,74470,20757,10826,14,19853,15774,175,74470,32545,89,71807,47300,253,58752,44194,254,61091,6568,68180,2,63144,12,47300,943,11765,6713,634,3697,64529,843,72583,5879,1,58735,32545,622,47300,527,2219,32545,175,48530,38959,267,72583,74677,2,45196,62566,945,80129,62566,175,72583,67404,293,77380,59324,1,75284,
11765,40180,175,58854,95645,89,93963,46212,738,74470,23185,133,62566,27,87350,64147,6713,175,43193,9820,89,43193,62566,754,97059,27,28050,11765,40180,963,82317,43071,964,68343,80796,175,10810,64954,89,52980,47300,730,50073,74112,965,83813,28156,61752,60605,355,10810,59830,349,50073,90610,1,32545,199,58854,27370,1]},awcbb_yhh_fun217:{variablePool:{e:null},zhili:[4062,44194,218,9832,7823,298,18087,15774,175,50073,40180,267,74470,72218,1]},awcbb_yhh_fun218:{variablePool:{t:null,e:null,awcbb_yhh_fun219:"awcbb_yhh_fun219"},zhili:[58960,989,5169,58960,175,57979,218,80124,58511,78762,989,61091,33414,60605,0,58854,7790,0,78762,145,43193,58767,1,67170,9820,841,58854,45100,1,2732,0,35812]},awcbb_yhh_fun219:{variablePool:{n:null,a:null,awcbb_yhh_fun220:"awcbb_yhh_fun220"},zhili:[46212,987,32517,33529,84670,80796,987,58752,22433,44194,988,61091,54009,44194,0,29478,27118,0,58960,52,72389,47409,2,74054]},awcbb_yhh_fun220:{variablePool:{_context3:null,awcbb_yhh_fun221:"awcbb_yhh_fun221"},zhili:[32545,981,43254,9820,968,84405,95311,119,68477,213,84670,7823,968,23098,97059,172,2219,95311,968,77380,47300,65,99362,27557,63531,43071,968,93963,97059,172,72583,80796,27,6713,121,57979,747,7823,185,7823,835,64954,27,6713,984,9820,985,26653,985,64529,986,49906,5,84670,64529,968,30033,9820,65,97059,121,82315,64277,7823,218,48530,7823,969,22541,95645,970,79230,7651,26653,355,22745,59830,349,58752,59324,1,48422,18087,26653,247,68343,40180,968,43193,26653,102,58752,52821,7823,298,80129,40180,175,53955,41646,267,87026,74677,1,58511,80796,224,22433,38959,247,61091,7823,73,52396,32545,698,29478,53596,15774,981,43193,27118,1,66095,78762,27,11368,54009,78762,247,99362,40180,73,10810,74112,48,82317,24867,11078,37,61752,23185,175,30033,44194,89,22541,59830,495,67404,30,27557,9103,57979,247,10057,40180,73,58752,60605,698,22745,25128,2,6713,982,85728,59830,634,43193,80796,843,71658,91090,1,6341,34,44194,925,14266,44194,294,33414,41646,224,50073,33352,64954,48,18922,260,24361,40180,175,85408,46212,218,10
810,9820,983,71807,96762,8414,62566,175,58854,60605,267,43193,29585,2,53596,97059,968,22541,23185,178,93963,55207,0,2504,55513,-217]},awcbb_yhh_fun221:{variablePool:{i:null},zhili:[66932,80796,135,50825,46702,971,334,59830,972,46702,973,334,7823,974,14608,975,334,7823,976,74154,977,334,60605,978,25728,979,334,15774,980,11765,40180,135,52050,59830,335,10057,31162,2,78762,335,50073,29585,2,74112,335,3697,31162,2,47300,335,29478,5879,2,74112,335,72389,72218,2,38302]},awcbb_yhh_fun222:{variablePool:{t:null,e:null,awcbb_yhh_fun223:"awcbb_yhh_fun223"},zhili:[6713,999,3306,74112,175,80796,218,55173,64147,59830,999,72389,2219,32545,0,72583,29362,0,67404,145,71807,76020,1,53596,6713,841,52396,95012,1,76020,0,51512]},awcbb_yhh_fun223:{variablePool:{n:null,o:null,a:null,i:null,awcbb_yhh_fun224:"awcbb_yhh_fun224"},zhili:[64529,997,26472,27863,11765,78762,997,72583,2219,46212,998,10057,64277,74112,0,22745,940,0,62566,52,99362,27370,2,51512]},awcbb_yhh_fun224:{variablePool:{_context4:null},zhili:[32517,64954,992,55173,95645,119,6905,231,11765,44194,992,77380,67404,172,30506,43071,992,53955,95645,65,72583,28050,84670,43071,992,53955,9820,172,58752,23185,27,58960,121,80796,314,9820,185,78762,835,18922,27,58960,994,97059,995,32545,995,62566,996,59260,5,5497,59830,992,10810,97059,65,67404,121,93438,72623,23185,218,58752,46212,993,79230,64147,43071,355,61091,15774,349,29478,45100,1,93555,22433,64954,247,54009,64529,992,52980,80796,102,52050,8080,80129,47300,247,99362,25219,2,52189,131,38959,932,18087,7823,247,22745,58361,73,77380,72623,57979,175,87026,78762,267,61091,27118,2,11765,57979,225,61752,26653,247,30033,95311,73,77380,8080,19853,7823,224,19853,67404,429,22745,1390,0,10733,58960,95,72623,7823,225,563,10217,23185,27,34933,25791,165,85408,43071,165,74470,8414,7823,166,87026,74112,133,93963,62435,1295,39,18087,74112,135,30506,26653,166,10057,67170,78762,165,82317,72583,35894,58511,32545,225,48530,54009,74112,135,57404,71807,47300,750,50073,53596,64529,224,30033,23185,126,52050,27
32,1,18087,38924,165,37455,-53,9820,934,64277,15774,224,58854,7651,95311,175,52050,26653,267,77380,29362,2,84581,64529,298,85728,23185,175,22541,74112,267,74470,68185,1,33414,6713,992,43193,40180,178,74470,69977,0,10515,63144,-235]},awcbb_yhh_fun225:{variablePool:{t:null,awcbb_yhh_fun226:"awcbb_yhh_fun226",awcbb_yhh_fun227:"awcbb_yhh_fun227"},zhili:[67404,1006,58361,1009,2158,78762,175,9832,7651,74112,175,10810,38959,89,23098,74112,536,71658,44194,593,11765,38959,1006,53955,15774,532,43071,307,51683,61752,59830,616,48530,68185,2,38232,30506,57979,175,74470,60605,89,52050,58960,538,563,58960,593,64153,38959,1009,22541,47300,307,19853,80796,616,58752,29362,2,53012,46212,95]},awcbb_yhh_fun226:{variablePool:{},zhili:[68149,21418,9103,23185,175,71658,62566,89,52050,44194,536,30033,43071,408,52396,58960,133,57404,5497,46212,175,29478,23185,89,53955,40180,536,53955,32545,917,50073,46279,21105,44,58511,80796,175,77380,32545,89,71807,64954,536,99362,64529,917,43071,27,78309,54009,41646,175,71807,59830,89,74470,46212,536,52396,58960,919,95645,119,80129,62566,175,43193,23185,89,77380,9820,536,93963,95645,919,99362,13263,47761,74112,1001,64153,23185,175,87026,80796,89,43193,58960,536,87026,15774,919,52980,97935,2977,35,83916,26653,1002,38959,1003,96225,95645,273,80796,1004,61744,80129,95311,1005,3697,45100,1,15276,64277,67404,175,3697,26653,89,93963,62566,536,58854,62566,919,59830,27,8080,59830,95,54009,64954,175,10810,60605,89,30033,97059,536,58854,78762,537,68343,60605,175,10057,18922,89,93963,97059,536,57404,67404,408,22745,58511,58960,175,30033,58960,89,74470,57979,536,57404,58361,917,30033,10810,78309,7651,80796,175,87026,58960,89,53955,60605,536,10057,74112,917,60605,119,61752,59830,175,77380,95645,89,22541,23185,536,82317,47300,917,43193,19919,81e3]},awcbb_yhh_fun227:{variablePool:{},zhili:[12239,72115,5497,78762,175,57404,41646,89,10057,41646,538,74470,62566,594,47300,119,19853,7823,175,58752,95645,89,22541,67404,538,74470,23185,594,77380,13263,47391,9103,26653,175,6109
1,64529,89,71658,44194,538,53955,64954,408,85728,44194,175,71658,58960,89,93963,46212,538,52980,95311,594,58752,5497,46212,284,52396,64529,357,10810,89391,1,41646,1007,32063,85445,15774,816,19853,58361,175,71807,44194,89,71658,15774,538,52396,46212,594,61091,38480,21105,42,76204,41646,175,52050,46212,89,87026,62566,538,29478,97059,408,32545,1008,30506,74112,175,72389,74112,89,23098,32545,538,53955,64954,408,22541,68644,84758,64147,7823,175,563,7823,89,10810,38959,538,58752,80796,408,61091,18922,95]},awcbb_yhh_fun228:{variablePool:{t:null,e:null,awcbb_yhh_fun229:"awcbb_yhh_fun229"},zhili:[80796,1019,39441,64529,175,59830,218,4004,19853,26653,1019,93963,80129,67404,0,52980,27370,0,32545,145,48530,69977,1,9103,62566,841,50073,52058,1,31787,0,45607]},awcbb_yhh_fun229:{variablePool:{n:null,o:null,awcbb_yhh_fun230:"awcbb_yhh_fun230"},zhili:[64529,1017,33338,98809,2219,80796,1017,52396,88198,46212,1018,30033,33414,47300,0,10057,58767,0,95311,52,22541,29362,2,27484]},awcbb_yhh_fun230:{variablePool:{_context5:null,awcbb_yhh_fun231:"awcbb_yhh_fun231"},zhili:[26653,1014,43254,26653,1012,23282,18922,119,20362,197,85728,38959,1012,3697,57979,172,64147,40180,1012,61091,58960,65,30033,85445,67170,41646,1012,99362,7823,172,58854,62566,27,95311,121,26653,314,67404,185,62566,835,64529,27,18922,994,38959,1015,59830,1015,67404,1016,53299,5,85728,64529,1012,71807,95311,65,58960,121,28050,84670,41646,218,22745,23185,1013,27429,34933,59830,355,71807,67404,349,57404,29585,1,58735,19853,78762,247,30506,41646,1012,43193,60605,102,72583,80837,64277,41646,247,22745,80958,7,85408,64529,247,3697,43071,73,3697,67772,2,67806,88,85408,46212,247,30033,44194,73,61091,42859,80796,27,68343,49304,165,2219,46212,165,52050,64147,97059,166,53955,64529,133,10057,20615,26420,46,9103,44194,225,84670,41646,166,23098,11765,23185,165,99362,58752,20350,61752,67404,225,71658,54009,26653,247,48530,64954,73,10810,64153,78762,225,52980,74470,8414,6713,284,563,46212,285,99362,32545,352,77380,47409,2,85728,38924,165,18
564,-60,72623,60605,1014,58854,95645,124,95311,307,64110,63531,41646,860,74470,29585,2,30506,80796,1012,52980,43071,178,53955,59324,0,41373,6341,-201]},awcbb_yhh_fun231:{variablePool:{},zhili:[78808,48183,58511,38959,866,52980,78762,867,3697,64529,868,82317,5879,0]}};function cshduei(){this.cf=function(){var b=this.gx[this.s][2];return this.s=this.gx[this.s][0],--this.CFf,b},this.cF=function(b){return this.s=this.gx[this.s][1],this.gx[this.s][2]=b,this.CFf+=1,b},this.sf=function(){var b;return this.CFf<1?10:(b=this.gx[this.s][2],this.s=this.gx[this.s][0],this.CFf--,this.s=this.gx[this.s][1],this.gx[this.s][2]=b,this.CFf+=1,101)},this.Cf=function(){var b,l,c=this.s;for(--this.CFf,this.s=this.gx[this.s][0],l=this.gx[c][2];;){if(c===this.gx.cbb1[1])break;b=this.gx[this.gx[c][0]][2],this.gx[this.gx[c][0]][2]=l,l=b,c=this.gx[c][0]}return l},this.gx={cbb1:[void 0,"cbb47",void 0],cbb47:["cbb1","cbb195",void 0],cbb195:["cbb47","cbb712",void 0],cbb712:["cbb195","cbb780",void 0],cbb780:["cbb712","cbb603",void 0],cbb603:["cbb780","cbb658",void 0],cbb658:["cbb603","cbb333",void 0],cbb333:["cbb658","cbb868",void 0],cbb868:["cbb333","cbb510",void 0],cbb510:["cbb868","cbb906",void 0],cbb906:["cbb510","cbb516",void 0],cbb516:["cbb906","cbb735",void 0],cbb735:["cbb516","cbb99",void 0],cbb99:["cbb735","cbb53",void 0],cbb53:["cbb99","cbb805",void 0],cbb805:["cbb53","cbb602",void 0],cbb602:["cbb805","cbb791",void 0],cbb791:["cbb602","cbb461",void 0],cbb461:["cbb791","cbb257",void 0],cbb257:["cbb461","cbb501",void 0],cbb501:["cbb257","cbb720",void 0],cbb720:["cbb501","cbb49",void 0],cbb49:["cbb720","cbb377",void 0],cbb377:["cbb49","cbb926",void 0],cbb926:["cbb377","cbb725",void 0],cbb725:["cbb926","cbb684",void 0],cbb684:["cbb725","cbb880",void 0],cbb880:["cbb684","cbb427",void 0],cbb427:["cbb880","cbb724",void 0],cbb724:["cbb427","cbb265",void 0],cbb265:["cbb724","cbb798",void 0],cbb798:["cbb265","cbb813",void 0],cbb813:["cbb798","cbb828",void 0],cbb828:["cbb813","cbb592",void 
0],cbb592:["cbb828","cbb934",void 0],cbb934:["cbb592","cbb795",void 0],cbb795:["cbb934","cbb417",void 0],cbb417:["cbb795","cbb81",void 0],cbb81:["cbb417","cbb555",void 0],cbb555:["cbb81","cbb862",void 0],cbb862:["cbb555","cbb452",void 0],cbb452:["cbb862","cbb851",void 0],cbb851:["cbb452","cbb764",void 0],cbb764:["cbb851","cbb661",void 0],cbb661:["cbb764","cbb899",void 0],cbb899:["cbb661","cbb108",void 0],cbb108:["cbb899","cbb665",void 0],cbb665:["cbb108","cbb245",void 0],cbb245:["cbb665","cbb97",void 0],cbb97:["cbb245","cbb830",void 0],cbb830:["cbb97","cbb643",void 0],cbb643:["cbb830","cbb104",void 0],cbb104:["cbb643","cbb877",void 0],cbb877:["cbb104","cbb856",void 0],cbb856:["cbb877","cbb155",void 0],cbb155:["cbb856","cbb321",void 0],cbb321:["cbb155","cbb546",void 0],cbb546:["cbb321","cbb40",void 0],cbb40:["cbb546","cbb67",void 0],cbb67:["cbb40","cbb139",void 0],cbb139:["cbb67","cbb212",void 0],cbb212:["cbb139","cbb123",void 0],cbb123:["cbb212","cbb894",void 0],cbb894:["cbb123","cbb325",void 0],cbb325:["cbb894","cbb137",void 0],cbb137:["cbb325","cbb349",void 0],cbb349:["cbb137","cbb142",void 0],cbb142:["cbb349","cbb360",void 0],cbb360:["cbb142","cbb395",void 0],cbb395:["cbb360","cbb686",void 0],cbb686:["cbb395","cbb657",void 0],cbb657:["cbb686","cbb38",void 0],cbb38:["cbb657","cbb853",void 0],cbb853:["cbb38","cbb198",void 0],cbb198:["cbb853","cbb282",void 0],cbb282:["cbb198","cbb15",void 0],cbb15:["cbb282","cbb758",void 0],cbb758:["cbb15","cbb752",void 0],cbb752:["cbb758","cbb671",void 0],cbb671:["cbb752","cbb244",void 0],cbb244:["cbb671","cbb34",void 0],cbb34:["cbb244","cbb891",void 0],cbb891:["cbb34","cbb802",void 0],cbb802:["cbb891","cbb974",void 0],cbb974:["cbb802","cbb180",void 0],cbb180:["cbb974","cbb252",void 0],cbb252:["cbb180","cbb977",void 0],cbb977:["cbb252","cbb615",void 0],cbb615:["cbb977","cbb220",void 0],cbb220:["cbb615","cbb430",void 0],cbb430:["cbb220","cbb769",void 0],cbb769:["cbb430","cbb825",void 0],cbb825:["cbb769","cbb17",void 
0],cbb17:["cbb825","cbb246",void 0],cbb246:["cbb17","cbb649",void 0],cbb649:["cbb246","cbb505",void 0],cbb505:["cbb649","cbb77",void 0],cbb77:["cbb505","cbb972",void 0],cbb972:["cbb77","cbb762",void 0],cbb762:["cbb972","cbb483",void 0],cbb483:["cbb762","cbb893",void 0],cbb893:["cbb483","cbb376",void 0],cbb376:["cbb893","cbb529",void 0],cbb529:["cbb376","cbb964",void 0],cbb964:["cbb529","cbb514",void 0],cbb514:["cbb964","cbb554",void 0],cbb554:["cbb514","cbb767",void 0],cbb767:["cbb554","cbb837",void 0],cbb837:["cbb767","cbb928",void 0],cbb928:["cbb837","cbb456",void 0],cbb456:["cbb928","cbb531",void 0],cbb531:["cbb456","cbb632",void 0],cbb632:["cbb531","cbb648",void 0],cbb648:["cbb632","cbb600",void 0],cbb600:["cbb648","cbb570",void 0],cbb570:["cbb600","cbb194",void 0],cbb194:["cbb570","cbb996",void 0],cbb996:["cbb194","cbb909",void 0],cbb909:["cbb996","cbb30",void 0],cbb30:["cbb909","cbb247",void 0],cbb247:["cbb30","cbb113",void 0],cbb113:["cbb247","cbb900",void 0],cbb900:["cbb113","cbb160",void 0],cbb160:["cbb900","cbb273",void 0],cbb273:["cbb160","cbb128",void 0],cbb128:["cbb273","cbb307",void 0],cbb307:["cbb128","cbb558",void 0],cbb558:["cbb307","cbb253",void 0],cbb253:["cbb558","cbb359",void 0],cbb359:["cbb253","cbb826",void 0],cbb826:["cbb359","cbb751",void 0],cbb751:["cbb826","cbb740",void 0],cbb740:["cbb751","cbb217",void 0],cbb217:["cbb740","cbb855",void 0],cbb855:["cbb217","cbb167",void 0],cbb167:["cbb855","cbb405",void 0],cbb405:["cbb167","cbb806",void 0],cbb806:["cbb405","cbb493",void 0],cbb493:["cbb806","cbb587",void 0],cbb587:["cbb493","cbb385",void 0],cbb385:["cbb587","cbb400",void 0],cbb400:["cbb385","cbb663",void 0],cbb663:["cbb400","cbb528",void 0],cbb528:["cbb663","cbb593",void 0],cbb593:["cbb528","cbb214",void 0],cbb214:["cbb593","cbb929",void 0],cbb929:["cbb214","cbb571",void 0],cbb571:["cbb929","cbb604",void 0],cbb604:["cbb571","cbb982",void 0],cbb982:["cbb604","cbb563",void 0],cbb563:["cbb982","cbb491",void 0],cbb491:["cbb563","cbb51",void 
0],cbb51:["cbb491","cbb614",void 0],cbb614:["cbb51","cbb54",void 0],cbb54:["cbb614","cbb185",void 0],cbb185:["cbb54","cbb199",void 0],cbb199:["cbb185","cbb358",void 0],cbb358:["cbb199","cbb878",void 0],cbb878:["cbb358","cbb832",void 0],cbb832:["cbb878","cbb701",void 0],cbb701:["cbb832","cbb196",void 0],cbb196:["cbb701","cbb989",void 0],cbb989:["cbb196","cbb904",void 0],cbb904:["cbb989","cbb275",void 0],cbb275:["cbb904","cbb342",void 0],cbb342:["cbb275","cbb992",void 0],cbb992:["cbb342","cbb646",void 0],cbb646:["cbb992","cbb656",void 0],cbb656:["cbb646","cbb177",void 0],cbb177:["cbb656","cbb598",void 0],cbb598:["cbb177","cbb63",void 0],cbb63:["cbb598","cbb843",void 0],cbb843:["cbb63","cbb250",void 0],cbb250:["cbb843","cbb530",void 0],cbb530:["cbb250","cbb968",void 0],cbb968:["cbb530","cbb283",void 0],cbb283:["cbb968","cbb234",void 0],cbb234:["cbb283","cbb314",void 0],cbb314:["cbb234","cbb268",void 0],cbb268:["cbb314","cbb889",void 0],cbb889:["cbb268","cbb578",void 0],cbb578:["cbb889","cbb186",void 0],cbb186:["cbb578","cbb158",void 0],cbb158:["cbb186","cbb962",void 0],cbb962:["cbb158","cbb736",void 0],cbb736:["cbb962","cbb432",void 0],cbb432:["cbb736","cbb508",void 0],cbb508:["cbb432","cbb5",void 0],cbb5:["cbb508","cbb396",void 0],cbb396:["cbb5","cbb487",void 0],cbb487:["cbb396","cbb547",void 0],cbb547:["cbb487","cbb777",void 0],cbb777:["cbb547","cbb309",void 0],cbb309:["cbb777","cbb433",void 0],cbb433:["cbb309","cbb169",void 0],cbb169:["cbb433","cbb943",void 0],cbb943:["cbb169","cbb594",void 0],cbb594:["cbb943","cbb886",void 0],cbb886:["cbb594","cbb140",void 0],cbb140:["cbb886","cbb372",void 0],cbb372:["cbb140","cbb970",void 0],cbb970:["cbb372","cbb312",void 0],cbb312:["cbb970","cbb951",void 0],cbb951:["cbb312","cbb339",void 0],cbb339:["cbb951","cbb635",void 0],cbb635:["cbb339","cbb675",void 0],cbb675:["cbb635","cbb153",void 0],cbb153:["cbb675","cbb370",void 0],cbb370:["cbb153","cbb148",void 0],cbb148:["cbb370","cbb270",void 0],cbb270:["cbb148","cbb326",void 
0],cbb326:["cbb270","cbb875",void 0],cbb875:["cbb326","cbb8",void 0],cbb8:["cbb875","cbb757",void 0],cbb757:["cbb8","cbb755",void 0],cbb755:["cbb757","cbb446",void 0],cbb446:["cbb755","cbb726",void 0],cbb726:["cbb446","cbb423",void 0],cbb423:["cbb726","cbb693",void 0],cbb693:["cbb423","cbb7",void 0],cbb7:["cbb693","cbb515",void 0],cbb515:["cbb7","cbb903",void 0],cbb903:["cbb515","cbb179",void 0],cbb179:["cbb903","cbb145",void 0],cbb145:["cbb179","cbb710",void 0],cbb710:["cbb145","cbb539",void 0],cbb539:["cbb710","cbb753",void 0],cbb753:["cbb539","cbb523",void 0],cbb523:["cbb753","cbb386",void 0],cbb386:["cbb523","cbb846",void 0],cbb846:["cbb386","cbb499",void 0],cbb499:["cbb846","cbb221",void 0],cbb221:["cbb499","cbb301",void 0],cbb301:["cbb221","cbb482",void 0],cbb482:["cbb301","cbb652",void 0],cbb652:["cbb482","cbb387",void 0],cbb387:["cbb652","cbb116",void 0],cbb116:["cbb387","cbb642",void 0],cbb642:["cbb116","cbb363",void 0],cbb363:["cbb642","cbb822",void 0],cbb822:["cbb363","cbb706",void 0],cbb706:["cbb822","cbb616",void 0],cbb616:["cbb706","cbb86",void 0],cbb86:["cbb616","cbb714",void 0],cbb714:["cbb86","cbb797",void 0],cbb797:["cbb714","cbb288",void 0],cbb288:["cbb797","cbb219",void 0],cbb219:["cbb288","cbb786",void 0],cbb786:["cbb219","cbb475",void 0],cbb475:["cbb786","cbb654",void 0],cbb654:["cbb475","cbb973",void 0],cbb973:["cbb654","cbb739",void 0],cbb739:["cbb973","cbb549",void 0],cbb549:["cbb739","cbb641",void 0],cbb641:["cbb549","cbb111",void 0],cbb111:["cbb641","cbb500",void 0],cbb500:["cbb111","cbb591",void 0],cbb591:["cbb500","cbb662",void 0],cbb662:["cbb591","cbb747",void 0],cbb747:["cbb662","cbb678",void 0],cbb678:["cbb747","cbb979",void 0],cbb979:["cbb678","cbb994",void 0],cbb994:["cbb979","cbb597",void 0],cbb597:["cbb994","cbb33",void 0],cbb33:["cbb597","cbb577",void 0],cbb577:["cbb33","cbb454",void 0],cbb454:["cbb577","cbb543",void 0],cbb543:["cbb454","cbb936",void 0],cbb936:["cbb543","cbb823",void 0],cbb823:["cbb936","cbb653",void 
0],cbb653:["cbb823","cbb187",void 0],cbb187:["cbb653","cbb809",void 0],cbb809:["cbb187","cbb126",void 0],cbb126:["cbb809","cbb12",void 0],cbb12:["cbb126","cbb580",void 0],cbb580:["cbb12","cbb18",void 0],cbb18:["cbb580","cbb898",void 0],cbb898:["cbb18","cbb772",void 0],cbb772:["cbb898","cbb759",void 0],cbb759:["cbb772","cbb953",void 0],cbb953:["cbb759","cbb833",void 0],cbb833:["cbb953","cbb476",void 0],cbb476:["cbb833","cbb789",void 0],cbb789:["cbb476","cbb121",void 0],cbb121:["cbb789","cbb146",void 0],cbb146:["cbb121","cbb9",void 0],cbb9:["cbb146","cbb776",void 0],cbb776:["cbb9","cbb507",void 0],cbb507:["cbb776","cbb238",void 0],cbb238:["cbb507","cbb651",void 0],cbb651:["cbb238","cbb328",void 0],cbb328:["cbb651","cbb406",void 0],cbb406:["cbb328","cbb278",void 0],cbb278:["cbb406","cbb193",void 0],cbb193:["cbb278","cbb46",void 0],cbb46:["cbb193","cbb357",void 0],cbb357:["cbb46","cbb129",void 0],cbb129:["cbb357","cbb399",void 0],cbb399:["cbb129","cbb168",void 0],cbb168:["cbb399","cbb874",void 0],cbb874:["cbb168","cbb305",void 0],cbb305:["cbb874","cbb488",void 0],cbb488:["cbb305","cbb161",void 0],cbb161:["cbb488","cbb698",void 0],cbb698:["cbb161","cbb118",void 0],cbb118:["cbb698","cbb586",void 0],cbb586:["cbb118","cbb572",void 0],cbb572:["cbb586","cbb629",void 0],cbb629:["cbb572","cbb469",void 0],cbb469:["cbb629","cbb612",void 0],cbb612:["cbb469","cbb829",void 0],cbb829:["cbb612","cbb746",void 0],cbb746:["cbb829","cbb793",void 0],cbb793:["cbb746","cbb869",void 0],cbb869:["cbb793","cbb114",void 0],cbb114:["cbb869","cbb854",void 0],cbb854:["cbb114","cbb176",void 0],cbb176:["cbb854","cbb190",void 0],cbb190:["cbb176","cbb197",void 0],cbb197:["cbb190","cbb836",void 0],cbb836:["cbb197","cbb466",void 0],cbb466:["cbb836","cbb203",void 0],cbb203:["cbb466","cbb354",void 0],cbb354:["cbb203","cbb966",void 0],cbb966:["cbb354","cbb768",void 0],cbb768:["cbb966","cbb284",void 0],cbb284:["cbb768","cbb392",void 0],cbb392:["cbb284","cbb69",void 0],cbb69:["cbb392","cbb674",void 
0],cbb674:["cbb69","cbb556",void 0],cbb556:["cbb674","cbb770",void 0],cbb770:["cbb556","cbb74",void 0],cbb74:["cbb770","cbb839",void 0],cbb839:["cbb74","cbb164",void 0],cbb164:["cbb839","cbb861",void 0],cbb861:["cbb164","cbb462",void 0],cbb462:["cbb861","cbb106",void 0],cbb106:["cbb462","cbb589",void 0],cbb589:["cbb106","cbb486",void 0],cbb486:["cbb589","cbb913",void 0],cbb913:["cbb486","cbb704",void 0],cbb704:["cbb913","cbb778",void 0],cbb778:["cbb704","cbb332",void 0],cbb332:["cbb778","cbb471",void 0],cbb471:["cbb332","cbb241",void 0],cbb241:["cbb471","cbb318",void 0],cbb318:["cbb241","cbb519",void 0],cbb519:["cbb318","cbb89",void 0],cbb89:["cbb519","cbb119",void 0],cbb119:["cbb89","cbb695",void 0],cbb695:["cbb119","cbb258",void 0],cbb258:["cbb695","cbb165",void 0],cbb165:["cbb258","cbb316",void 0],cbb316:["cbb165","cbb626",void 0],cbb626:["cbb316","cbb172",void 0],cbb172:["cbb626","cbb216",void 0],cbb216:["cbb172","cbb442",void 0],cbb442:["cbb216","cbb228",void 0],cbb228:["cbb442","cbb463",void 0],cbb463:["cbb228","cbb867",void 0],cbb867:["cbb463","cbb13",void 0],cbb13:["cbb867","cbb150",void 0],cbb150:["cbb13","cbb683",void 0],cbb683:["cbb150","cbb31",void 0],cbb31:["cbb683","cbb754",void 0],cbb754:["cbb31","cbb91",void 0],cbb91:["cbb754","cbb545",void 0],cbb545:["cbb91","cbb969",void 0],cbb969:["cbb545","cbb610",void 0],cbb610:["cbb969","cbb14",void 0],cbb14:["cbb610","cbb402",void 0],cbb402:["cbb14","cbb685",void 0],cbb685:["cbb402","cbb350",void 0],cbb350:["cbb685","cbb184",void 0],cbb184:["cbb350","cbb383",void 0],cbb383:["cbb184","cbb761",void 0],cbb761:["cbb383","cbb717",void 0],cbb717:["cbb761","cbb991",void 0],cbb991:["cbb717","cbb509",void 0],cbb509:["cbb991","cbb467",void 0],cbb467:["cbb509","cbb599",void 0],cbb599:["cbb467","cbb50",void 0],cbb50:["cbb599","cbb788",void 0],cbb788:["cbb50","cbb585",void 0],cbb585:["cbb788","cbb115",void 0],cbb115:["cbb585","cbb242",void 0],cbb242:["cbb115","cbb294",void 0],cbb294:["cbb242","cbb276",void 
0],cbb276:["cbb294","cbb533",void 0],cbb533:["cbb276","cbb60",void 0],cbb60:["cbb533","cbb361",void 0],cbb361:["cbb60","cbb327",void 0],cbb327:["cbb361","cbb189",void 0],cbb189:["cbb327","cbb990",void 0],cbb990:["cbb189","cbb634",void 0],cbb634:["cbb990","cbb134",void 0],cbb134:["cbb634","cbb821",void 0],cbb821:["cbb134","cbb799",void 0],cbb799:["cbb821","cbb285",void 0],cbb285:["cbb799","cbb62",void 0],cbb62:["cbb285","cbb226",void 0],cbb226:["cbb62","cbb609",void 0],cbb609:["cbb226","cbb366",void 0],cbb366:["cbb609","cbb623",void 0],cbb623:["cbb366","cbb100",void 0],cbb100:["cbb623","cbb300",void 0],cbb300:["cbb100","cbb784",void 0],cbb784:["cbb300","cbb135",void 0],cbb135:["cbb784","cbb465",void 0],cbb465:["cbb135","cbb644",void 0],cbb644:["cbb465","cbb607",void 0],cbb607:["cbb644","cbb29",void 0],cbb29:["cbb607","cbb930",void 0],cbb930:["cbb29","cbb763",void 0],cbb763:["cbb930","cbb211",void 0],cbb211:["cbb763","cbb743",void 0],cbb743:["cbb211","cbb87",void 0],cbb87:["cbb743","cbb109",void 0],cbb109:["cbb87","cbb827",void 0],cbb827:["cbb109","cbb337",void 0],cbb337:["cbb827","cbb37",void 0],cbb37:["cbb337","cbb949",void 0],cbb949:["cbb37","cbb3",void 0],cbb3:["cbb949","cbb225",void 0],cbb225:["cbb3","cbb144",void 0],cbb144:["cbb225","cbb365",void 0],cbb365:["cbb144","cbb579",void 0],cbb579:["cbb365","cbb766",void 0],cbb766:["cbb579","cbb147",void 0],cbb147:["cbb766","cbb296",void 0],cbb296:["cbb147","cbb310",void 0],cbb310:["cbb296","cbb315",void 0],cbb315:["cbb310","cbb249",void 0],cbb249:["cbb315","cbb655",void 0],cbb655:["cbb249","cbb136",void 0],cbb136:["cbb655","cbb208",void 0],cbb208:["cbb136","cbb78",void 0],cbb78:["cbb208","cbb921",void 0],cbb921:["cbb78","cbb204",void 0],cbb204:["cbb921","cbb955",void 0],cbb955:["cbb204","cbb756",void 0],cbb756:["cbb955","cbb542",void 0],cbb542:["cbb756","cbb807",void 0],cbb807:["cbb542","cbb414",void 0],cbb414:["cbb807","cbb566",void 0],cbb566:["cbb414","cbb917",void 0],cbb917:["cbb566","cbb256",void 
0],cbb256:["cbb917","cbb291",void 0],cbb291:["cbb256","cbb277",void 0],cbb277:["cbb291","cbb738",void 0],cbb738:["cbb277","cbb254",void 0],cbb254:["cbb738","cbb513",void 0],cbb513:["cbb254","cbb82",void 0],cbb82:["cbb513","cbb794",void 0],cbb794:["cbb82","cbb699",void 0],cbb699:["cbb794","cbb553",void 0],cbb553:["cbb699","cbb954",void 0],cbb954:["cbb553","cbb149",void 0],cbb149:["cbb954","cbb532",void 0],cbb532:["cbb149","cbb560",void 0],cbb560:["cbb532","cbb534",void 0],cbb534:["cbb560","cbb255",void 0],cbb255:["cbb534","cbb380",void 0],cbb380:["cbb255","cbb331",void 0],cbb331:["cbb380","cbb44",void 0],cbb44:["cbb331","cbb550",void 0],cbb550:["cbb44","cbb721",void 0],cbb721:["cbb550","cbb143",void 0],cbb143:["cbb721","cbb24",void 0],cbb24:["cbb143","cbb286",void 0],cbb286:["cbb24","cbb496",void 0],cbb496:["cbb286","cbb96",void 0],cbb96:["cbb496","cbb470",void 0],cbb470:["cbb96","cbb35",void 0],cbb35:["cbb470","cbb790",void 0],cbb790:["cbb35","cbb774",void 0],cbb774:["cbb790","cbb834",void 0],cbb834:["cbb774","cbb404",void 0],cbb404:["cbb834","cbb885",void 0],cbb885:["cbb404","cbb279",void 0],cbb279:["cbb885","cbb817",void 0],cbb817:["cbb279","cbb437",void 0],cbb437:["cbb817","cbb271",void 0],cbb271:["cbb437","cbb159",void 0],cbb159:["cbb271","cbb293",void 0],cbb293:["cbb159","cbb393",void 0],cbb393:["cbb293","cbb355",void 0],cbb355:["cbb393","cbb232",void 0],cbb232:["cbb355","cbb261",void 0],cbb261:["cbb232","cbb907",void 0],cbb907:["cbb261","cbb183",void 0],cbb183:["cbb907","cbb274",void 0],cbb274:["cbb183","cbb521",void 0],cbb521:["cbb274","cbb368",void 0],cbb368:["cbb521","cbb551",void 0],cbb551:["cbb368","cbb11",void 0],cbb11:["cbb551","cbb895",void 0],cbb895:["cbb11","cbb857",void 0],cbb857:["cbb895","cbb873",void 0],cbb873:["cbb857","cbb628",void 0],cbb628:["cbb873","cbb925",void 0],cbb925:["cbb628","cbb362",void 0],cbb362:["cbb925","cbb841",void 0],cbb841:["cbb362","cbb438",void 0],cbb438:["cbb841","cbb336",void 0],cbb336:["cbb438","cbb218",void 
0],cbb218:["cbb336","cbb503",void 0],cbb503:["cbb218","cbb494",void 0],cbb494:["cbb503","cbb474",void 0],cbb474:["cbb494","cbb582",void 0],cbb582:["cbb474","cbb506",void 0],cbb506:["cbb582","cbb760",void 0],cbb760:["cbb506","cbb209",void 0],cbb209:["cbb760","cbb988",void 0],cbb988:["cbb209","cbb950",void 0],cbb950:["cbb988","cbb548",void 0],cbb548:["cbb950","cbb681",void 0],cbb681:["cbb548","cbb723",void 0],cbb723:["cbb681","cbb489",void 0],cbb489:["cbb723","cbb575",void 0],cbb575:["cbb489","cbb440",void 0],cbb440:["cbb575","cbb473",void 0],cbb473:["cbb440","cbb892",void 0],cbb892:["cbb473","cbb83",void 0],cbb83:["cbb892","cbb281",void 0],cbb281:["cbb83","cbb660",void 0],cbb660:["cbb281","cbb154",void 0],cbb154:["cbb660","cbb308",void 0],cbb308:["cbb154","cbb235",void 0],cbb235:["cbb308","cbb734",void 0],cbb734:["cbb235","cbb625",void 0],cbb625:["cbb734","cbb912",void 0],cbb912:["cbb625","cbb694",void 0],cbb694:["cbb912","cbb70",void 0],cbb70:["cbb694","cbb420",void 0],cbb420:["cbb70","cbb213",void 0],cbb213:["cbb420","cbb984",void 0],cbb984:["cbb213","cbb750",void 0],cbb750:["cbb984","cbb810",void 0],cbb810:["cbb750","cbb866",void 0],cbb866:["cbb810","cbb819",void 0],cbb819:["cbb866","cbb92",void 0],cbb92:["cbb819","cbb166",void 0],cbb166:["cbb92","cbb210",void 0],cbb210:["cbb166","cbb947",void 0],cbb947:["cbb210","cbb858",void 0],cbb858:["cbb947","cbb535",void 0],cbb535:["cbb858","cbb838",void 0],cbb838:["cbb535","cbb94",void 0],cbb94:["cbb838","cbb447",void 0],cbb447:["cbb94","cbb132",void 0],cbb132:["cbb447","cbb627",void 0],cbb627:["cbb132","cbb4",void 0],cbb4:["cbb627","cbb993",void 0],cbb993:["cbb4","cbb700",void 0],cbb700:["cbb993","cbb520",void 0],cbb520:["cbb700","cbb537",void 0],cbb537:["cbb520","cbb902",void 0],cbb902:["cbb537","cbb690",void 0],cbb690:["cbb902","cbb345",void 0],cbb345:["cbb690","cbb879",void 0],cbb879:["cbb345","cbb173",void 0],cbb173:["cbb879","cbb88",void 0],cbb88:["cbb173","cbb304",void 0],cbb304:["cbb88","cbb435",void 
0],cbb435:["cbb304","cbb391",void 0],cbb391:["cbb435","cbb52",void 0],cbb52:["cbb391","cbb41",void 0],cbb41:["cbb52","cbb151",void 0],cbb151:["cbb41","cbb418",void 0],cbb418:["cbb151","cbb773",void 0],cbb773:["cbb418","cbb831",void 0],cbb831:["cbb773","cbb403",void 0],cbb403:["cbb831","cbb449",void 0],cbb449:["cbb403","cbb688",void 0],cbb688:["cbb449","cbb859",void 0],cbb859:["cbb688","cbb967",void 0],cbb967:["cbb859","cbb107",void 0],cbb107:["cbb967","cbb664",void 0],cbb664:["cbb107","cbb233",void 0],cbb233:["cbb664","cbb407",void 0],cbb407:["cbb233","cbb419",void 0],cbb419:["cbb407","cbb299",void 0],cbb299:["cbb419","cbb781",void 0],cbb781:["cbb299","cbb941",void 0],cbb941:["cbb781","cbb340",void 0],cbb340:["cbb941","cbb719",void 0],cbb719:["cbb340","cbb248",void 0],cbb248:["cbb719","cbb569",void 0],cbb569:["cbb248","cbb141",void 0],cbb141:["cbb569","cbb207",void 0],cbb207:["cbb141","cbb36",void 0],cbb36:["cbb207","cbb923",void 0],cbb923:["cbb36","cbb175",void 0],cbb175:["cbb923","cbb733",void 0],cbb733:["cbb175","cbb408",void 0],cbb408:["cbb733","cbb618",void 0],cbb618:["cbb408","cbb785",void 0],cbb785:["cbb618","cbb709",void 0],cbb709:["cbb785","cbb883",void 0],cbb883:["cbb709","cbb441",void 0],cbb441:["cbb883","cbb731",void 0],cbb731:["cbb441","cbb715",void 0],cbb715:["cbb731","cbb702",void 0],cbb702:["cbb715","cbb689",void 0],cbb689:["cbb702","cbb938",void 0],cbb938:["cbb689","cbb730",void 0],cbb730:["cbb938","cbb659",void 0],cbb659:["cbb730","cbb295",void 0],cbb295:["cbb659","cbb939",void 0],cbb939:["cbb295","cbb455",void 0],cbb455:["cbb939","cbb260",void 0],cbb260:["cbb455","cbb206",void 0],cbb206:["cbb260","cbb439",void 0],cbb439:["cbb206","cbb156",void 0],cbb156:["cbb439","cbb870",void 0],cbb870:["cbb156","cbb57",void 0],cbb57:["cbb870","cbb800",void 0],cbb800:["cbb57","cbb313",void 0],cbb313:["cbb800","cbb259",void 0],cbb259:["cbb313","cbb986",void 0],cbb986:["cbb259","cbb576",void 0],cbb576:["cbb986","cbb388",void 0],cbb388:["cbb576","cbb192",void 
0],cbb192:["cbb388","cbb68",void 0],cbb68:["cbb192","cbb71",void 0],cbb71:["cbb68","cbb727",void 0],cbb727:["cbb71","cbb544",void 0],cbb544:["cbb727","cbb595",void 0],cbb595:["cbb544","cbb611",void 0],cbb611:["cbb595","cbb429",void 0],cbb429:["cbb611","cbb887",void 0],cbb887:["cbb429","cbb409",void 0],cbb409:["cbb887","cbb639",void 0],cbb639:["cbb409","cbb897",void 0],cbb897:["cbb639","cbb526",void 0],cbb526:["cbb897","cbb940",void 0],cbb940:["cbb526","cbb574",void 0],cbb574:["cbb940","cbb882",void 0],cbb882:["cbb574","cbb536",void 0],cbb536:["cbb882","cbb178",void 0],cbb178:["cbb536","cbb351",void 0],cbb351:["cbb178","cbb565",void 0],cbb565:["cbb351","cbb480",void 0],cbb480:["cbb565","cbb896",void 0],cbb896:["cbb480","cbb348",void 0],cbb348:["cbb896","cbb985",void 0],cbb985:["cbb348","cbb436",void 0],cbb436:["cbb985","cbb93",void 0],cbb93:["cbb436","cbb732",void 0],cbb732:["cbb93","cbb311",void 0],cbb311:["cbb732","cbb914",void 0],cbb914:["cbb311","cbb697",void 0],cbb697:["cbb914","cbb916",void 0],cbb916:["cbb697","cbb43",void 0],cbb43:["cbb916","cbb290",void 0],cbb290:["cbb43","cbb888",void 0],cbb888:["cbb290","cbb64",void 0],cbb64:["cbb888","cbb692",void 0],cbb692:["cbb64","cbb749",void 0],cbb749:["cbb692","cbb871",void 0],cbb871:["cbb749","cbb667",void 0],cbb667:["cbb871","cbb200",void 0],cbb200:["cbb667","cbb687",void 0],cbb687:["cbb200","cbb911",void 0],cbb911:["cbb687","cbb718",void 0],cbb718:["cbb911","cbb787",void 0],cbb787:["cbb718","cbb748",void 0],cbb748:["cbb787","cbb369",void 0],cbb369:["cbb748","cbb397",void 0],cbb397:["cbb369","cbb416",void 0],cbb416:["cbb397","cbb344",void 0],cbb344:["cbb416","cbb443",void 0],cbb443:["cbb344","cbb596",void 0],cbb596:["cbb443","cbb672",void 0],cbb672:["cbb596","cbb163",void 0],cbb163:["cbb672","cbb901",void 0],cbb901:["cbb163","cbb263",void 0],cbb263:["cbb901","cbb378",void 0],cbb378:["cbb263","cbb131",void 0],cbb131:["cbb378","cbb998",void 0],cbb998:["cbb131","cbb722",void 0],cbb722:["cbb998","cbb942",void 
0],cbb942:["cbb722","cbb564",void 0],cbb564:["cbb942","cbb162",void 0],cbb162:["cbb564","cbb353",void 0],cbb353:["cbb162","cbb42",void 0],cbb42:["cbb353","cbb25",void 0],cbb25:["cbb42","cbb251",void 0],cbb251:["cbb25","cbb932",void 0],cbb932:["cbb251","cbb2",void 0],cbb2:["cbb932","cbb924",void 0],cbb924:["cbb2","cbb691",void 0],cbb691:["cbb924","cbb811",void 0],cbb811:["cbb691","cbb696",void 0],cbb696:["cbb811","cbb621",void 0],cbb621:["cbb696","cbb803",void 0],cbb803:["cbb621","cbb820",void 0],cbb820:["cbb803","cbb728",void 0],cbb728:["cbb820","cbb329",void 0],cbb329:["cbb728","cbb157",void 0],cbb157:["cbb329","cbb808",void 0],cbb808:["cbb157","cbb852",void 0],cbb852:["cbb808","cbb415",void 0],cbb415:["cbb852","cbb824",void 0],cbb824:["cbb415","cbb239",void 0],cbb239:["cbb824","cbb202",void 0],cbb202:["cbb239","cbb127",void 0],cbb127:["cbb202","cbb101",void 0],cbb101:["cbb127","cbb425",void 0],cbb425:["cbb101","cbb622",void 0],cbb622:["cbb425","cbb541",void 0],cbb541:["cbb622","cbb645",void 0],cbb645:["cbb541","cbb638",void 0],cbb638:["cbb645","cbb783",void 0],cbb783:["cbb638","cbb459",void 0],cbb459:["cbb783","cbb85",void 0],cbb85:["cbb459","cbb741",void 0],cbb741:["cbb85","cbb484",void 0],cbb484:["cbb741","cbb398",void 0],cbb398:["cbb484","cbb426",void 0],cbb426:["cbb398","cbb188",void 0],cbb188:["cbb426","cbb617",void 0],cbb617:["cbb188","cbb102",void 0],cbb102:["cbb617","cbb619",void 0],cbb619:["cbb102","cbb995",void 0],cbb995:["cbb619","cbb538",void 0],cbb538:["cbb995","cbb389",void 0],cbb389:["cbb538","cbb479",void 0],cbb479:["cbb389","cbb460",void 0],cbb460:["cbb479","cbb779",void 0],cbb779:["cbb460","cbb27",void 0],cbb27:["cbb779","cbb961",void 0],cbb961:["cbb27","cbb498",void 0],cbb498:["cbb961","cbb999",void 0],cbb999:["cbb498","cbb343",void 0],cbb343:["cbb999","cbb16",void 0],cbb16:["cbb343","cbb865",void 0],cbb865:["cbb16","cbb266",void 0],cbb266:["cbb865","cbb816",void 0],cbb816:["cbb266","cbb84",void 0],cbb84:["cbb816","cbb504",void 
0],cbb504:["cbb84","cbb737",void 0],cbb737:["cbb504","cbb518",void 0],cbb518:["cbb737","cbb666",void 0],cbb666:["cbb518","cbb840",void 0],cbb840:["cbb666","cbb937",void 0],cbb937:["cbb840","cbb117",void 0],cbb117:["cbb937","cbb608",void 0],cbb608:["cbb117","cbb201",void 0],cbb201:["cbb608","cbb182",void 0],cbb182:["cbb201","cbb20",void 0],cbb20:["cbb182","cbb765",void 0],cbb765:["cbb20","cbb32",void 0],cbb32:["cbb765","cbb347",void 0],cbb347:["cbb32","cbb80",void 0],cbb80:["cbb347","cbb61",void 0],cbb61:["cbb80","cbb804",void 0],cbb804:["cbb61","cbb605",void 0],cbb605:["cbb804","cbb411",void 0],cbb411:["cbb605","cbb669",void 0],cbb669:["cbb411","cbb624",void 0],cbb624:["cbb669","cbb191",void 0],cbb191:["cbb624","cbb908",void 0],cbb908:["cbb191","cbb677",void 0],cbb677:["cbb908","cbb264",void 0],cbb264:["cbb677","cbb527",void 0],cbb527:["cbb264","cbb884",void 0],cbb884:["cbb527","cbb90",void 0],cbb90:["cbb884","cbb298",void 0],cbb298:["cbb90","cbb237",void 0],cbb237:["cbb298","cbb152",void 0],cbb152:["cbb237","cbb711",void 0],cbb711:["cbb152","cbb918",void 0],cbb918:["cbb711","cbb59",void 0],cbb59:["cbb918","cbb317",void 0],cbb317:["cbb59","cbb745",void 0],cbb745:["cbb317","cbb39",void 0],cbb39:["cbb745","cbb812",void 0],cbb812:["cbb39","cbb983",void 0],cbb983:["cbb812","cbb650",void 0],cbb650:["cbb983","cbb963",void 0],cbb963:["cbb650","cbb956",void 0],cbb956:["cbb963","cbb287",void 0],cbb287:["cbb956","cbb771",void 0],cbb771:["cbb287","cbb224",void 0],cbb224:["cbb771","cbb394",void 0],cbb394:["cbb224","cbb606",void 0],cbb606:["cbb394","cbb497",void 0],cbb497:["cbb606","cbb860",void 0],cbb860:["cbb497","cbb306",void 0],cbb306:["cbb860","cbb567",void 0],cbb567:["cbb306","cbb524",void 0],cbb524:["cbb567","cbb668",void 0],cbb668:["cbb524","cbb171",void 0],cbb171:["cbb668","cbb478",void 0],cbb478:["cbb171","cbb647",void 0],cbb647:["cbb478","cbb481",void 0],cbb481:["cbb647","cbb676",void 0],cbb676:["cbb481","cbb935",void 0],cbb935:["cbb676","cbb450",void 
0],cbb450:["cbb935","cbb468",void 0],cbb468:["cbb450","cbb637",void 0],cbb637:["cbb468","cbb431",void 0],cbb431:["cbb637","cbb205",void 0],cbb205:["cbb431","cbb975",void 0],cbb975:["cbb205","cbb946",void 0],cbb946:["cbb975","cbb346",void 0],cbb346:["cbb946","cbb453",void 0],cbb453:["cbb346","cbb303",void 0],cbb303:["cbb453","cbb58",void 0],cbb58:["cbb303","cbb319",void 0],cbb319:["cbb58","cbb23",void 0],cbb23:["cbb319","cbb613",void 0],cbb613:["cbb23","cbb864",void 0],cbb864:["cbb613","cbb713",void 0],cbb713:["cbb864","cbb814",void 0],cbb814:["cbb713","cbb26",void 0],cbb26:["cbb814","cbb876",void 0],cbb876:["cbb26","cbb842",void 0],cbb842:["cbb876","cbb782",void 0],cbb782:["cbb842","cbb330",void 0],cbb330:["cbb782","cbb75",void 0],cbb75:["cbb330","cbb10",void 0],cbb10:["cbb75","cbb73",void 0],cbb73:["cbb10","cbb28",void 0],cbb28:["cbb73","cbb323",void 0],cbb323:["cbb28","cbb931",void 0],cbb931:["cbb323","cbb229",void 0],cbb229:["cbb931","cbb744",void 0],cbb744:["cbb229","cbb464",void 0],cbb464:["cbb744","cbb170",void 0],cbb170:["cbb464","cbb280",void 0],cbb280:["cbb170","cbb472",void 0],cbb472:["cbb280","cbb976",void 0],cbb976:["cbb472","cbb379",void 0],cbb379:["cbb976","cbb801",void 0],cbb801:["cbb379","cbb421",void 0],cbb421:["cbb801","cbb835",void 0],cbb835:["cbb421","cbb796",void 0],cbb796:["cbb835","cbb792",void 0],cbb792:["cbb796","cbb958",void 0],cbb958:["cbb792","cbb320",void 0],cbb320:["cbb958","cbb573",void 0],cbb573:["cbb320","cbb552",void 0],cbb552:["cbb573","cbb847",void 0],cbb847:["cbb552","cbb412",void 0],cbb412:["cbb847","cbb243",void 0],cbb243:["cbb412","cbb588",void 0],cbb588:["cbb243","cbb512",void 0],cbb512:["cbb588","cbb945",void 0],cbb945:["cbb512","cbb890",void 0],cbb890:["cbb945","cbb289",void 0],cbb289:["cbb890","cbb227",void 0],cbb227:["cbb289","cbb568",void 0],cbb568:["cbb227","cbb236",void 0],cbb236:["cbb568","cbb45",void 0],cbb45:["cbb236","cbb679",void 0],cbb679:["cbb45","cbb103",void 0],cbb103:["cbb679","cbb215",void 
0],cbb215:["cbb103","cbb705",void 0],cbb705:["cbb215","cbb559",void 0],cbb559:["cbb705","cbb584",void 0],cbb584:["cbb559","cbb451",void 0],cbb451:["cbb584","cbb490",void 0],cbb490:["cbb451","cbb630",void 0],cbb630:["cbb490","cbb384",void 0],cbb384:["cbb630","cbb434",void 0],cbb434:["cbb384","cbb445",void 0],cbb445:["cbb434","cbb401",void 0],cbb401:["cbb445","cbb262",void 0],cbb262:["cbb401","cbb428",void 0],cbb428:["cbb262","cbb302",void 0],cbb302:["cbb428","cbb422",void 0],cbb422:["cbb302","cbb583",void 0],cbb583:["cbb422","cbb910",void 0],cbb910:["cbb583","cbb222",void 0],cbb222:["cbb910","cbb492",void 0],cbb492:["cbb222","cbb335",void 0],cbb335:["cbb492","cbb562",void 0],cbb562:["cbb335","cbb133",void 0],cbb133:["cbb562","cbb231",void 0],cbb231:["cbb133","cbb334",void 0],cbb334:["cbb231","cbb682",void 0],cbb682:["cbb334","cbb65",void 0],cbb65:["cbb682","cbb19",void 0],cbb19:["cbb65","cbb522",void 0],cbb522:["cbb19","cbb633",void 0],cbb633:["cbb522","cbb848",void 0],cbb848:["cbb633","cbb381",void 0],cbb381:["cbb848","cbb448",void 0],cbb448:["cbb381","cbb120",void 0],cbb120:["cbb448","cbb742",void 0],cbb742:["cbb120","cbb375",void 0],cbb375:["cbb742","cbb959",void 0],cbb959:["cbb375","cbb997",void 0],cbb997:["cbb959","cbb48",void 0],cbb48:["cbb997","cbb502",void 0],cbb502:["cbb48","cbb424",void 0],cbb424:["cbb502","cbb981",void 0],cbb981:["cbb424","cbb230",void 0],cbb230:["cbb981","cbb815",void 0],cbb815:["cbb230","cbb670",void 0],cbb670:["cbb815","cbb371",void 0],cbb371:["cbb670","cbb978",void 0],cbb978:["cbb371","cbb922",void 0],cbb922:["cbb978","cbb138",void 0],cbb138:["cbb922","cbb341",void 0],cbb341:["cbb138","cbb601",void 0],cbb601:["cbb341","cbb95",void 0],cbb95:["cbb601","cbb98",void 0],cbb98:["cbb95","cbb356",void 0],cbb356:["cbb98","cbb324",void 0],cbb324:["cbb356","cbb620",void 0],cbb620:["cbb324","cbb485",void 0],cbb485:["cbb620","cbb122",void 0],cbb122:["cbb485","cbb269",void 0],cbb269:["cbb122","cbb110",void 0],cbb110:["cbb269","cbb920",void 
0],cbb920:["cbb110","cbb631",void 0],cbb631:["cbb920","cbb240",void 0],cbb240:["cbb631","cbb849",void 0],cbb849:["cbb240","cbb130",void 0],cbb130:["cbb849","cbb382",void 0],cbb382:["cbb130","cbb850",void 0],cbb850:["cbb382","cbb56",void 0],cbb56:["cbb850","cbb410",void 0],cbb410:["cbb56","cbb640",void 0],cbb640:["cbb410","cbb72",void 0],cbb72:["cbb640","cbb933",void 0],cbb933:["cbb72","cbb863",void 0],cbb863:["cbb933","cbb297",void 0],cbb297:["cbb863","cbb272",void 0],cbb272:["cbb297","cbb374",void 0],cbb374:["cbb272","cbb540",void 0],cbb540:["cbb374","cbb22",void 0],cbb22:["cbb540","cbb79",void 0],cbb79:["cbb22","cbb444",void 0],cbb444:["cbb79","cbb581",void 0],cbb581:["cbb444","cbb948",void 0],cbb948:["cbb581","cbb987",void 0],cbb987:["cbb948","cbb980",void 0],cbb980:["cbb987","cbb125",void 0],cbb125:["cbb980","cbb703",void 0],cbb703:["cbb125","cbb915",void 0],cbb915:["cbb703","cbb716",void 0],cbb716:["cbb915","cbb919",void 0],cbb919:["cbb716","cbb673",void 0],cbb673:["cbb919","cbb413",void 0],cbb413:["cbb673","cbb66",void 0],cbb66:["cbb413","cbb708",void 0],cbb708:["cbb66","cbb525",void 0],cbb525:["cbb708","cbb105",void 0],cbb105:["cbb525","cbb267",void 0],cbb267:["cbb105","cbb707",void 0],cbb707:["cbb267","cbb905",void 0],cbb905:["cbb707","cbb971",void 0],cbb971:["cbb905","cbb124",void 0],cbb124:["cbb971","cbb364",void 0],cbb364:["cbb124","cbb965",void 0],cbb965:["cbb364","cbb458",void 0],cbb458:["cbb965","cbb511",void 0],cbb511:["cbb458","cbb55",void 0],cbb55:["cbb511","cbb373",void 0],cbb373:["cbb55","cbb952",void 0],cbb952:["cbb373","cbb881",void 0],cbb881:["cbb952","cbb390",void 0],cbb390:["cbb881","cbb457",void 0],cbb457:["cbb390","cbb6",void 0],cbb6:["cbb457","cbb844",void 0],cbb844:["cbb6","cbb775",void 0],cbb775:["cbb844","cbb729",void 0],cbb729:["cbb775","cbb517",void 0],cbb517:["cbb729","cbb680",void 0],cbb680:["cbb517","cbb112",void 0],cbb112:["cbb680","cbb636",void 0],cbb636:["cbb112","cbb367",void 0],cbb367:["cbb636","cbb181",void 
0],cbb181:["cbb367","cbb590",void 0],cbb590:["cbb181","cbb944",void 0],cbb944:["cbb590","cbb322",void 0],cbb322:["cbb944","cbb872",void 0],cbb872:["cbb322","cbb76",void 0],cbb76:["cbb872","cbb818",void 0],cbb818:["cbb76","cbb957",void 0],cbb957:["cbb818","cbb292",void 0],cbb292:["cbb957","cbb561",void 0],cbb561:["cbb292","cbb21",void 0],cbb21:["cbb561","cbb927",void 0],cbb927:["cbb21","cbb845",void 0],cbb845:["cbb927","cbb557",void 0],cbb557:["cbb845","cbb352",void 0],cbb352:["cbb557","cbb338",void 0],cbb338:["cbb352","cbb223",void 0],cbb223:["cbb338","cbb495",void 0],cbb495:["cbb223","cbb174",void 0],cbb174:["cbb495","cbb477",void 0],cbb477:["cbb174","cbb960",void 0],cbb960:["cbb477","",void 0]},this.s="cbb1",this.CFf=0}function cltothis(b,l,c){for(var i in l)b[i]=1==c?void 0:l[i]}function cbb_jsvmp(l,c,i,e,o,b,a){function n(b,l,c){for(;;){if(b.hasOwnProperty(l)){try{b[l]=c}catch(b){return void(this[l]=c)}break}if(null==(b=b.__proto__))return void(window[l]=c);w=9}}z=void 0!==a?(h=a.allthis,c=a.duei,l=a.all,e=a.shuz,o=a.argsList,v=a.a7,g=a.args,a.cbbb):(h=void 0!==b?b:l,g=[],l);var 
h,f,t,r,s,_,u,v,d,w,y,P,p,g,z,m=[5,2,1,2,2,2,1,2,13,1,13,1,1,2,2,10,1,2,1,2,2,1,5,1,2,2,2,1,8,2,2,5,2,1,2,2,2,1,1,1,2,2,1,2,2,2,1,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,8,2,7,1,4,2,2,1,1,10,1,2,2,2,7,13,2,2,1,1,2,2,1,1,2,2,2,2,2,11,1,2,1,2,2,2,2,2,8,1,6,2,13,9,1,2,2,2,2,4,2,2,1,2,1,2,2,2,8,2,2,2,1,2,2,1,2,1,2,2,1,2,2,12,2,1,9,1,13,6,11,1,2,5,1,2,1,2,2,2,1,1,10,2,6,1,2,2,12,2,2,8,2,1,2,2,2,1,2,2,2,1,2,1,1,4,2,2,1,10,2,2,2,2,2,2,2,2,10,2,2,9,1,2,2,1,1,8,1,2,1,1,8,13,1,2,2,1,2,1,2,6,2,1,9,2,1,1,1,1,2,2,1,2,2,1,1,2,1,2,1,2,1,2,2,2,4,1,2,10,1,2,13,2,2,2,2,2,1,2,1,2,11,2,2,2,1,1,2,1,2,1,6,2,1,2,2,1,2,2,1,2,12,2,2,2,12,11,9,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,2,8,1,1,6,2,2,2,2,2,2,1,2,2,2,1,5,1,2,2,2,2,2,7,13,5,1,9,2,2,2,8,2,2,2,1,1,7,2,2,1,2,2,1,2,1,2,2,2,2,2,1,2,2,2,1,1,4,2,1,2,5,9,2,1,2,2,2,2,2,10,7,2,2,10,1,1,2,2,1,2,2,1,2,1,5,7,1,2,8,2,2,2,1,2,1,2,1,1,10,2,2,2,1,2,2,8,2,2,1,11,1,2,11,2,1,2,2,2,2,1,1,2,2,2,1,1,2,2,2,2,2,13,11,4,2,2,2,2,1,2,2,1,1,2,10,1,1,2,2,2,2,1,2,2,2,2,1,2,6,2,2,2,1,4,2,2,1,1,2,6,2,2,2,2,1,2,2,6,2,2,1,2,1,2,1,1,4,2,2,1,4,2,1,1,2,2,2,2,7,1,1,2,1,2,2,2,2,2,2,2,2,2,1,2,2,1,1,1,2,1,1,1,11,12,5,2,2,2,2,2,1,2,2,2,1,7,2,7,2,1,1,2,2,1,2,8,2,1,2,1,2,2,2,2,9,2,2,2,2,1,2,2,11,1,2,2,1,2,2,2,2,2,2,2,10,2,5,2,2,12,2,2,2,1,1,1,2,2,2,2,2,1,2,2,12,2,2,2,1,2,1,2,2,1,7,2,1,13,4,1,2,10,1,2,6,2,1,2,1,2,1,2,1,2,1,2,11,1,2,2,2,2,1,2,2,1,2,2,2,1,2,1,2,2,2,5,2,2,1,2,12,1,2,2,2,5,2,1,11,2,1,2,8,2,1,1,2,2,2,1,2,1,12,2,2,2,2,2,5,2,2,2,2,2,12,1,2,2,1,10,1,7,5,1,2,2,2,1,1,2,1,1,4,2,2,1,1,2,1,1,2,2,2,2,2,1,1,2,3,2,2,2,1,2,1,8,2,2,6,1,1,9,1,2,1,2,13,1,2,2,1,2,12,9,1,1,1,2,2,2,2,1,2,1,2,2,10,13,1,2,2,2,1,2,1,2,2,1,1,2,1,2,1,2,2,2,2,2,1,2,2,1,2,2,2,1,2,5,13,9,11,2,2,2,1,2,1,2,7,2,2,10,2,2,2,2,4,2,2,1,3,1,2,1,2,2,1,2,1,2,2,2,2,2,2,13,1,2,11,2,2,6,1,1,2,1,2,13,2,8,1,2,12,6,1,1,2,4,1,8,12,1,12,2,1,1,2,1,1,1,1,5,2,1,2,2,1,1,1,2,2,2,1,2,2,2,2,1,12,7,2,1,2,2,2,1,1,2,7,13,2,1,2,1,2,1,1,2,2,1,1,2,1,8,1,1,2,2,2,2,1,2,2,2,2,2,2,2,2,1,1,2,2,1,1,2,2,2,2,2,1,2,2,1,1,1,2,8,6,2,1,1,1,10,2,1,2,9,1,2,2,13,2,
2,2,2,1,2,2,2,4,2,2,2,2,10,2,1,2,1,2,1,2,1,2,2,2,5,2,2,2,2,9,2,12,1,2,2,2,2,2,1,10,2,2,2,1,11,2,2,2,1,9,1,12,1,2,2,2,2,1,2,2,2,1,5,2,1,1,2,2,2,2,1,2,2,1,2,11,1,1,2,1,2,2,2,5,2,2,2,2,8,5,2,1,2,7,1,2,1,1,2,2,1,2,6,2,2,2,5,2,13,2,2,1,2,1,2,2,2,2,1,2,2,11,2,1,2,2,9,1,2,9,2,2,1,2,1,2,2,1,4,2,1,1,2,1,2,2,1,2,5,2,1,2,13,1,2,2,2,2,2,1,1,1,1,12,13,13,1,1,5,7,2,2,1,2,1,2,11,1,2,1,1,6,1,2,2,12,2,2,2,2,12,1,1,2,9,2,2,1,2,1,2,5,2,9,2,2,1,1,4,1,2,2,13,1,2,1,2,2,5,2,1,6,1,2,2,2,5,3,2,2,1,2,2,1,2,2,2,1,1,2,1,2,1,12,1,2,2,2,2,2,2,2,2,2,2,1,1,2,2,1,1,2,9,13,2,1,4,2,1,8,2,2,6,2,2,8,1,1,1,2,11,2,2,1,2,2,2,1,2,2,2,1,1,2,2,3,2,1,1,2,1,2,1,1,2,2,2,6,1,2,1,2,2,2,6,2,2,2,2,1,2,1,13,2,2,10,1,2,11,1,1,2,6,1,2,2,2,2,1,2,2,2,2,2,11,2,2,2,7,2,1,2,2,2,2,1,12,5,13,1,2,2,1,1,2,5,1,1,2,13,1,2,2,1,2,2,1,12,2,12,10,8,1,1,2,1,2,2,2,1,1,1,2,2,2,1,1,2,2,9,2,1,1,2,1,2,1,2,2,2,2,2,2,1,1,2,2,2,1,2,2,2,10,2,2,1,6,1,2,2,2,2,1,2,2,1,2,10,2,1,2,13,2,2,1,2,2,2,13,9,1,1,1,2,2,1,1,2,2,2,1,2,2,10,2,2,2,2,1,2,2,2,2,1,2,2,1,2,1,6,2,1,12,2,2,2,1,2,1,2,1,2,2,8,8,1,6,10,10,2,2,10,1,6,5,1,2,2,8,2,2,1,1,2,2,2,1,2,1,2,2,2,9,2,1,2,1,1,2,2,2,1,12,2,2,1,3,2,2,1,1,2,1,2,2,1,2,2,2,1,2,2,11,1,2,2,1,1,2,2,1,1,1,1,5,2,1,1,1,2,1,10,1,1,13,1,2,7,2,11,2,2,1,2,2,1,2,2,2,5,2,1,1,2,1,1,2,10,1,2,2,1,2,1,2,2,9,2,2,2,8,2,2,1,2,2,6,2,2,5,2,5,2,2,1,2,1,1,9,5,8,9,7,1,2,1,2,2,2,1,2,1,1,4,9,7,2,2,2,2,2,2,11,2,2,1,1,2,2,1,2,2,2,2,2,2,2,10,2,1,2,2,1,2,1,1,2,13,7,1,2,2,1,2,1,5,2,2,2,2,1,2,8,2,1,1,2,2,2,2,2,2,1,2,1,2,2,1,2,1,1,2,8,1,2,1,2,11,1,1,2,1,2,2,1,1,2,2,2,2,1,1,2,2,2,2,1,1,1,1,2,2,1,1,4,1,1,2,2,2,4,6,2,1,2,2,4,2,2,2,2,1,2,2,2,2,2,13,1,2,2,1,2,1,1,2,2,2,11,2,2,13,2,2,13,2,9,2,2,2,2,2,1,2,1,2,2,2,2,1,2,2,1,2,1,2,2,2,2,1,2,2,1,1,2,1,2,2,2,1,2,1,1,1,1,2,2,2,14,1,1,2,2,2,4,11,10,2,1,2,1,2,1,2,2,2,2,2,11,2,2,6,2,2,5,9,2,2,1,1,14,1,7,9,2,1,2,2,1,2,2,1,2,2,2,2,1,2,2,1,2,2,2,2,1,1,2,8,2,2,1,2,4,1,6,2,2,2,1,1,2,2,1,2,1,2,2,2,1,2,1,2,2,2,1,1,1,2,2,1,2,13,2,1,2,1,5,1,2,7,1,2,2,1,2,1,13,6,2,1,11,1,2,2,1,7,2,2,3,2,2,1,11,1,2,1,2,4,2,2,12,2,2,2,2,2,2,2,2,
2,2,2,1,1,1,4,2,3,9,5,2,2,2,1,2,11,1,2,1,1,2,1,2,2,1,1,1,9,2,2,1,1,2,1,1,2,2,2,1,1,2,8,6,2,1,2,2,2,1,2,1,5,1,1,2,2,1,2,8,12,2,1,2,1,7,2,2,2,2,2,2,1,1,2,2,2,1,1,2,2,2,13,2,2,9,1,4,4,2,2,2,2,1,1,2,4,12,1,2,6,13,2,2,2,2,1,2,1,1,6,2,1,2,2,2,2,1,1,1,2,2,2,1,5,2,12,2,2,2,2,2,2,2,2,2,2,2,2,6,1,2,12,1,10,2,2,2,7,2,2,9,1,2,7,2,2,1,8,2,2,13,2,1,2,2,2,1,12,2,2,1,2,2,11,7,1,1,2,2,2,2,1,2,1,2,1,2,10,1,1,2,1,2,2,2,2,7,2,2,1,2,2,11,2,2,7,2,13,12,2,11,2,1,1,2,2,1,1,1,2,1,1,2,2,1,2,2,1,1,2,2,2,2,2,2,9,1,2,1,2,2,2,2,2,2,2,4,1,1,9,2,2,1,2,3,1,1,2,4,2,2,2,2,1,2,2,2,2,2,2,2,1,8,1,1,1,2,2,2,1,2,2,2,1,2,1,2,2,2,1,1,2,2,2,10,2,5,2,2,2,2,11,1,2,2,2,1,1,9,2,2,1,1,7,1,2,2,2,2,2,1,2,2,1,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,2,2,2,1,2,10,1,1,1,2,9,1,2,2,1,2,2,12,2,13,2,2,1,1,2,1,2,11,2,4,2,2,1,2,2,1,2,1,2,1,2,2,1,2,1,2,2,2,1,1,2,1,2,2,2,2,2,2,1,8,1,2,2,1,2,13,10,2,1,4,2,1,2,2,1,2,2,2,2,13,1,2,2,2,2,2,2,2,2,2,2,2,13,1,2,2,2,1,1,2,2,1,2,1,2,2,2,2,2,2,1,2,2,2,2,5,1,1,2,2,2,6,1,6,2,7,1,2,10,2,12,2,2,2,1,1,1,2,1,1,1,1,2,2,2,2,1,2,1,2,2,7,2,1,2,1,2,2,2,1,4,1,1,2,2,1,2,2,2,2,1,1,1,2,2,2,2,2,4,2,2,2,2,1,1,2,2,8,1,2,2,5,2,9,2,1,2,1,2,11,2,2,1,1,2,1,1,1,1,2,2,1,2,1,1,10,10,2,8,2,5,1,2,1,1,1,1,2,2,2,12,2,2,2,2,2,1,2,2,9,2,1,1,1,1,2,5,4,1,2,1,2,2,1,2,2,11,2,2,2,7,12,2,2,2,2,2,2,1,1,11,2,1,2,2,2,2,10,2,1,1,2,11,2,2,2,7,2,2,1,1,2,2,13,1,1,2,1,2,2,2,2,1,2,2,1,6,2,2,2,1,2,2,13,2,1,2,2,2,2,8,2,2,1,1,2,2,2,2,2,2,1,1,2,1,1,2,11,1,2,2,1,2,4,2,2,1,1,1,2,2,11,2,2,2,1,12,2,7,2,1,1,2,2,2,1,2,2,1,7,2,1,2,1,1,1,1,2,1,1,2,2,2,2,8,2,1,2,1,2,2,1,8,2,2,1,2,2,2,8,2,2,2,2,10,2,3,2,1,1,2,1,2,2,12,2,1,2,1,1,2,1,2,2,11,2,7,2,2,2,2,11,2,2,1,2,2,2,1,2,12,1,1,2,1,1,1,10,2,6,2,1,1,1,2,1,2,12,2,2,5,2,10,1,2,1,1,1,1,1,2,10,5,2,2,1,2,2,2,2,2,2,1,2,2,1,2,6,2,11,1,2,1,2,2,2,2,1,1,5,1,6,1,2,2,2,2,2,1,2,1,10,1,8,2,2,2,2,1,2,2,1,13,2,1,2,10,2,2,1,2,2,2,2,2,1,2,1,1,5,2,1,1,9,2,2,2,1,2,2,11,2,2,1,1,1,2,1,1,2,2,2,1,1,2,13,1,7,1,2,2,12,1,5,2,13,2,2,1,1,1,6,2,8,12,1,2,6,11,2,1,3,8,2,2,2,2,1,2,2,4,2,2,1,1,1,1,2,2,2,2,6,2,2,4,10,2,2,6,1,2,1,2,1,2,2,2,2,2,
1,1,8,2,2,2,2,1,2,12,2,2,2,2,2,12,1,2,1,8,1,2,2,7,2,2,12,2,2,6,2,7,1,2,1,1,2,2,1,6,2,2,1,2,1,8,2,11,12,1,13,2,8,7,2,2,7,1,1,1,2,1,1,2,2,2,6,1,1,1,5,9,2,2,2,2,2,2,2,2,1,2,12,2,7,1,2,1,2,2,11,2,2,2,2,4,2,2,2,2,2,2,2,1,2,1,2,6,12,2,2,1,1,2,2,1,9,1,8,2,1,2,2,1,2,2,10,2,2,2,2,1,1,2,8,2,2,1,1,1,11,2,1,2,2,2,11,2,1,2,1,2,2,2,1,2,2,2,7,2,1,1,2,2,1,2,1,2,2,8,2,1,2,2,1,2,2,2,2,2,1,1,2,2,2,2,7,2,1,7,4,1,1,1,1,2,8,1,2,2,5,1,12,2,1,2,2,1,1,2,1,2,2,1,2,12,1,2,2,1,2,12,10,2,8,14,2,13,2,9,1,2,2,1,2,2,1,11,2,2,1,2,1,1,2,6,1,2,10,2,2,9,1,1,1,2,1,1,2,2,2,11,5,1,2,7,1,1,2,2,2,1,2,1,4,2,2,2,1,2,11,2,13,1,2,11,2,2,2,12,2,2,7,12,1,1,1,6,2,1,1,2,1,2,2,2,1,1,2,2,2,1,2,2,2,2,2,2,2,2,11,2,2,9,2,6,1,2,1,1,1,2,1,1,2,2,2,2,2,2,1,2,8,2,1,1,2,2,1,1,1,2,1,2,2,1,2,2,2,2,6,2,2,1,1,2,1,2,2,2,2,2,2,2,11,2,4,1,8,10,8,1,1,1,2,12,2,2,2,2,1,2,2,2,2,1,1,6,1,2,2,1,2,10,2,1,1,1,2,2,2,8,1,11,4,2,2,2,1,2,2,1,2,1,1,1,2,2,2,2,2,9,2,2,6,2,1,1,1,1,9,1,2,1,1,2,2,1,3,2,2,10,1,2,10,1,2,11,2,12,2,2,2,1,2,1,1,2,1,1,2,1,1,2,2,2,2,4,13,1,2,1,2,1,1,2,8,2,2,2,2,1,8,1,2,2,10,1,2,3,2,1,2,2,2,1,1,2,2,1,2,1,2,2,2,2,2,2,2,1,10,2,2,2,1,2,2,2,10,2,2,8,2,1,1,2,1,2,1,2,4,2,2,2,5,2,2,2,2,2,2,1,1,1,2,11,2,10,1,1,2,2,2,2,2,2,1,1,2,2,2,1,2,8,2,2,2,1,2,1,2,2,1,2,13,2,7,1,2,1,2,2,2,1,2,2,2,2,6,1,2,2,2,2,1,2,12,12,1,1,1,1,2,1,9,2,2,1,5,2,2,1,6,1,2,9,1,1,14,2,2,1,9,2,2,2,3,1,2,2,2,1,2,1,10,1,2,2,2,2,8,1,11,2,2,11,2,2,1,2,2,2,1,1,1,2,1,2,1,2,7,2,2,2,2,1,1,11,1,2,2,1,6,2,1,2,1,2,2,2,5,2,2,2,1,4,1,1,1,2,2,2,1,2,1,1,2,1,1,2,2,1,2,1,2,2,2,2,2,1,1,2,13,13,2,1,1,1,2,2,1,1,2,1,1,9,2,1,2,2,2,2,2,1,4,1,2,2,2,2,1,2,2,7,2,2,2,9,1,2,1,2,2,2,1,2,2,2,2,6,2,2,2,2,2,1,2,2,1,2,13,1,1,1,2,1,1,1,1,2,2,2,5,2,10,1,2,2,2,2,1,1,1,4,2,2,1,2,1,2,13,2,10,1,2,1,2,1,1,2,2,8,8,1,2,2,2,2,2,2,2,2,1,1,1,2,2,2,1,12,2,2,1,2,1,1,2,2,2,2,1,2,2,1,3,9,2,2,2,2,2,2,2,7,2,1,1,10,4,10,1,1,2,2,1,2,1,5,1,1,2,1,10,2,2,2,1,2,1,2,2,2,2,2,1,2,13,2,5,2,2,12,2,2,2,2,2,4,8,2,2,2,5,9,2,2,1,2,1,1,1,1,1,1,2,2,2,1,2,2,1,1,1,1,2,2,2,1,1,12,2,2,1,2,2,2,6,2,2,2,2,12,2,1,2,1,2,2,2,2,2,2,1,2,2,2,5,2,
2,2,1,12,2,12,13,1,7,2,2,2,10,2,2,2,2,2,7,2,2,1,1,1,7,1,1,11,1,1,1,2,2,2,2,2,1,2,2,2,1,2,1,2,2,1,2,2,1,2,3,2,1,2,1,11,2,2,1,1,1,12,1,2,2,2,1,2,11,10,2,3,1,6,1,1,2,2,1,2,2,6,1,1,1,2,2,2,2,2,1,1,2,2,1,1,6,2,5,12,1,2,2,2,2,2,2,1,2,2,2,2,1,2,2,1,2,2,1,2,2,2,1,2,1,5,8,2,2,2,2,2,2,1,2,10,2,2,2,1,2,2,1,9,2,2,2,2,2,2,2,2,8,2,1,1,2,13,1,13,2,1,1,2,2,2,2,1,2,2,12,2,2,2,2,2,2,1,8,1,2,2,2,2,12,2,1,1,2,2,1,1,2,2,1,2,1,8,2,1,1,2,1,2,11,1,2,1,2,2,2,2,12,2,10,2,3,1,1,1,8,2,9,2,2,1,1,5,2,2,2,1,1,1,1,5,11,2,1,2,2,13,2,1,1,1,1,2,2,1,2,1,2,2,1,2,2,12,2,2,2,2,2,2,2,2,2,2,2,4,2,2,1,2,1,2,1,11,2,7,1,2,1,2,1,2,4,13,2,13,13,1,1,1,2,2,1,2,2,2,1,1,2,1,1,2,2,1,2,2,1,2,2,2,2,2,1,5,12,1,2,1,4,2,2,2,2,2,1,2,2,2,2,13,2,2,1,2,8,2,2,1,1,1,2,1,8,1,2,12,11,10,2,2,2,1,2,2,5,1,2,1,2,7,2,1,2,2,2,2,2,2,1,2,2,2,9,2,2,1,13,1,2,1,2,2,1,2,9,2,2,2,2,2,6,2,1,2,8,2,2,1,2,2,1,2,1,2,2,2,4,4,2,2,1,2,1,1,1,2,2,1,1,1,9,2,2,1,6,2,2,2,2,1,2,1,2,2,1,1,2,2,10,2,2,1,11,2,1,1,2,2,2,2,7,1,2,2,2,9,2,2,2,10,2,2,1,1,1,2,2,2,2,1,2,2,2,2,2,1,2,2,2,2,2,2,2,2,11,2,1,2,2,2,1,2,12,2,1,2,2,1,1,1,1,1,2,2,2,2,1,1,2,1,1,2,2,2,7,2,2,1,9,1,2,2,7,2,2,5,2,2,2,2,1,2,2,2,4,2,2,2,1,2,2,2,2,2,1,1,1,13,2,2,2,2,4,2,3,12,2,2,2,1,2,1,10,2,1,1,1,12,2,1,2,12,5,1,2,2,1,1,2,2,5,2,2,8,10,2,13,9,2,2,2,7,2,1,2,2,2,12,2,2,2,2,6,1,2,4,1,2,2,1,1,1,1,2,2,2,1,2,2,2,2,2,6,1,2,11,5,1,2,1,2,1,1,1,2,1,6,1,1,1,1,2,2,7,1,1,1,2,2,1,1,1,1,2,1,2,2,1,2,4,8,2,13,2,10,2,2,2,2,2,10,13,2,2,2,2,2,1,2,2,2,2,2,4,1,1,1,2,5,2,2,2,1,1,2,2,2,1,2,7,1,12,4,2,2,2,12,2,7,2,2,2,1,2,9,2,1,1,2,2,14,1,13,2,2,2,9,1,2,1,2,2,3,2,1,2,1,2,2,1,7,1,2,2,1,1,2,2,1,2,1,2,2,2,2,10,9,1,2,10,2,4,2,2,2,1,2,2,12,3,2,2,2,1,1,1,1,2,2,1,2,1,2,1,2,1,1,2,2,2,2,2,1,1,2,10,1,2,2,1,2,1,2,2,2,2,7,1,2,2,4,1,2,2,2,1,1,2,2,10,1,1,9,2,2,2,10,2,6,1,13,1,2,2,2,2,1,1,1,2,2,2,1,1,2,1,1,1,1,1,2,2,2,1,10,1,2,2,2,2,2,2,2,1,2,1,1,2,2,8,2,1,1,1,12,2,8,7,2,2,2,1,11,2,2,2,2,1,2,1,1,1,1,2,2,2,2,2,2,1,1,1,11,1,11,2,2,11,2,1,1,2,1,2,1,1,2,8,2,2,1,2,2,2,2,2,7,1,2,1,2,1,2,2,2,2,1,2,1,8,1,2,2,2,1,1,1,2,1,1,1,1,1,2,2,5,2,2,2,2,2,6,7,
2,2,6,1,1,1,2,2,1,2,2,1,4,2,1,2,1,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,12,2,2,2,1,1,2,1,2,2,1,2,1,13,2,1,2,1,1,1,2,2,2,2,1,1,2,2,2,2,2,2,1,2,2,2,9,2,2,12,2,2,1,11,2,1,2,10,2,10,1,9,1,2,11,1,1,2,1,2,5,1,2,1,11,1,1,3,10,6,1,2,1,2,2,2,12,2,2,2,2,2,9,1,2,6,1,2,6,1,1,2,1,1,2,1,2,2,4,1,12,2,13,1,2,1,2,2,1,2,2,10,2,2,2,2,1,1,2,2,2,2,2,2,8,2,2,1,7,2,1,1,2,1,2,2,13,2,2,2,1,2,2,2,2,2,1,2,8,2,2,2,2,9,1,2,2,1,2,2,2,1,2,2,2,5,2,2,2,2,2,7,11,6,2,1,1,2,2,1,2,6,2,2,2,2,2,2,1,2,2,1,1,2,8,2,1,1,1,2,5,10,1,2,2,2,2,9,1,2,9,1,2,1,2,8,2,2,4,2,2,2,1,2,2,1,9,12,2,2,6,2,2,1,1,6,2,2,2,2,9,2,11,10,1,2,2,1,2,2,2,7,2,2,1,2,2,2,2,2,2,1,2,1,2,1,1,2,2,1,2,1,5,2,2,2,1,2,1,1,1,1,2,12,7,4,13,2,2,2,2,2,1,11,2,2,2,2,2,12,2,11,1,1,2,1,1,2,2,2,2,2,7,6,2,2,2,2,2,1,2,7,2,2,2,2,1,2,2,2,2,2,1,13,2,2,1,1,12,1,2,13,2,4,2,2,1,7,1,8,1,1,2,2,2,2,2,2,2,2,11,2,9,2,2,9,2,2,1,2,11,1,2,1,2,2,1,1,1,2,1,2,1,2,2,1,1,2,2,2,1,1,2,4,1,2,2,8,13,1,2,2,2,1,2,2,1,2,1,2,5,1,2,2,1,1,1,2,2,2,2,1,2,2,2,2,2,1,2,2,2,2,1,4,2,1,2,6,8,2,1,2,2,1,1,1,2,2,1,2,10,1,2,1,2,1,2,2,4,2,1,1,2,1,2,2,1,2,1,1,2,2,6,7,13,8,2,1,1,1,2,2,2,2,8,2,11,1,8,2,2,1,2,1,2,9,2,2,4,2,1,2,2,4,1,2,1,1,2,11,7,1,10,2,2,1,2,2,2,2,1,2,1,2,1,1,1,1,2,2,2,9,1,1,1,1,2,2,2,2,1,2,2,2,2,2,2,2,1,1,2,2,1,1,2,2,5,1,7,2,8,1,2,2,2,1,1,2,2,9,1,5,2,2,2,2,3,1,1,1,2,1,8,1,2,2,6,1,1,2,2,2,2,1,8,1,2,1,2,2,10,2,2,8,1,1,2,1,2,1,2,2,6,2,1,1,1,2,1,2,2,2,2,1,2,2,13,2,1,2,11,7,2,2,2,2,1,1,2,1,2,2,2,2,2,2,8,1,1,2,8,13,2,2,1,2,1,1,4,2,12,1,1,1,1,1,1,2,1,2,13,1,2,2,1,10,1,11,2,1,2,2,1,2,2,1,13,2,2,2,2,2,1,2,2,1,2,13,1,2,1,2,1,2,12,2,2,2,1,1,1,2,1,2,2,6,2,1,2,2,2,2,2,8,1,6,1,2,1,2,2,2,1,1,2,2,1,1,2,2,1,1,2,2,4,2,6,7,1,1,1,2,7,1,1,1,2,2,2,7,2,8,2,1,2,1,11,12,2,2,1,1,2,2,5,2,2,3,2,2,2,11,2,1,1,2,1,12,10,2,10,2,1,1,1,2,1,12,5,9,13,2,9,2,13,2,13,10,2,2,1,1,1,1,2,5,1,2,1,1,8,2,2,2,2,2,1,2,10,1,2,1,2,1,2,7,1,1,1,2,2,2,1,1,2,2,2,1,2,2,2,2,2,12,2,1,3,1,2,1,1,2,9,1,1,2,2,2,1,2,5,2,1,1,2,2,2,2,2,2,2,2,1,2,2,2,4,2,2,1,2,1,1,1,2,6,2,2,1,2,1,11,1,2,10,1,2,1,2,2,1,2,2,2,1,1,1,2,1,2,1,1,1,11,2,2,1,2,2,2,2,1,6,1,1,
1,1,2,1,1,11,2,1,8,2,2,4,1,2,2,1,1,8,2,2,2,1,2,2,2,12,1,2,1,8,1,2,7,11,1,2,1,2,2,2,10,2,2,1,2,1,2,2,2,1,2,2,2,2,1,2,1,2,1,2,1,1,1,2,2,2,1,1,8,2,1,2,1,2,2,1,2,11,1,2,2,10,14,2,8,2,11,1,2,2,1,1,2,2,2,2,5,2,1,2,2,1,2,2,2,1,2,2,12,2,1,2,4,2,2,1,6,2,2,2,2,2,2,2,2,1,7,2,1,2,2,9,3,9,2,2,7,1,1,2,2,2,2,2,8,1,1,2,2,1,2,2,2,9,1,10,1,6,2,1,1,2,1,2,10,2,10,2,2,2,2,2,1,9,5,1,2,1,1,2,13,2,2,2,2,2,2,2,2,2,2,1,6,1,2,2,4,2,2,2,1,2,2,10,2,1,2,2,2,2,1,6,4,7,1,2,2,2,1,2,2,1,1,1,2,2,2,1,1,2,1,2,1,1,1,2,1,2,1,2,2,2,1,2,2,2,2,2,2,2,1,2,1,2,1,2,2,2,2,2,2,1,2,1,2,1,8,2,2,2,2,6,2,1,1,10,7,2,1,1,1,1,3,2,2,1,1,2,1,2,2,7,2,2,2,1,1,2,2,1,1,2,2,2,2,1,2,1,2,2,1,2,2,2,2,2,2,1,2,2,9,2,2,1,2,2,1,8,2,2,2,2,1,1,2,1,5,2,2,2,2,2,1,6,2,2,1,1,2,2,1,2,1,2,2,2,2,2,1,2,1,1,2,2,5,2,2,1,1,1,2,1,1,5,2,2,2,2,1,2,2,7,2,2,2,1,1,2,2,1,1,10,7,1,2,1,2,2,2,6,2,1,2,2,2,2,2,1,2,2,2,2,13,2,8,2,2,1,1,9,2,2,2,1,2,2,1,4,1,2,1,2,2,2,2,1,2,2,2,8,2,2,1,2,1,2,2,1,6,12,1,2,2,2,13,1,2,1,1,2,2,2,2,1,13,2,6,1,2,1,11,2,1,2,1,2,5,2,2,1,2,1,2,1,1,2,1,10,2,1,1,1,2,2,2,13,2,1,1,2,2,7,2,2,1,2,2,2,1,2,4,1,1,2,13,1,6,2,2,2,2,1,2,2,2,1,2,2,2,1,2,1,2,1,1,2,1,2,1,1,1,2,1,1,2,4,2,1,1,2,2,1,1,1,13,2,2,2,2,1,1,2,1,8,8,2,2,2,1,2,1,1,2,2,2,2,1,2,9,1,2,8,2,7,2,7,1,1,2,2,2,1,1,2,8,2,1,10,2,1,2,6,10,2,1,2,1,2,2,1,1,2,2,7,2,1,2,2,2,1,1,2,2,1,1,1,2,2,1,2,2,2,2,2,7,1,9,1,1,1,2,2,12,2,1,1,1,1,2,11,1,1,2,13,2,1,1,1,1,2,9,2,10,2,2,2,2,2,2,2,1,2,2,2,11,2,13,11,1,1,1,1,2,1,6,13,1,2,2,2,2,2,2,1,1,13,1,1,2,2,1,2,1,10,6,2,2,1,2,1,2,1,13,2,2,1,1,2,1,2,2,2,2,2,2,1,6,2,1,1,1,6,1,1,1,2,5,1,2,7,7,8,1,6,2,10,2,11,1,2,2,2,2,1,2,5,1,4,2,2,2,2,2,4,1,11,1,1,2,2,2,2,1,2,2,2,1,1,2,2,1,5,2,1,2,1,2,1,1,2,8,2,2,1,2,2,1,1,2,4,2,2,13,1,1,1,7,1,2,9,1,13,1,2,2,1,1,1,13,1,1,2,1,1,1,1,7,1,5,2,1,2,11,2,2,2,2,2,2,1,1,1,2,2,1,2,2,2,1,11,11,1,2,13,2,2,1,2,2,1,1,2,1,2,1,2,2,2,2,2,2,10,2,2,2,2,6,1,1,2,1,1,1,2,2,1,1,2,1,1,1,2,8,2,2,1,2,1,2,10,1,1,1,1,2,2,2,1,2,2,1,2,1,2,7,1,2,1,2,2,2,4,1,5,14,1,2,10,2,2,2,2,2,2,2,1,2,1,2,2,8,3,1,2,12,2,8,2,8,2,2,1,1,2,4,2,1,1,2,2,1,2,2,1,2,1,1,1,2,2,9,2,2,2
,6,1,2,2,4,1,1,2,2,1,1,1,2,2,1,1,2,1,1,2,1,1,2,2,2,1,1,2,1,2,1,6,2,2,4,1,2,2,2,2,2,1,1,2,2,1,1,6,2,2,1,1,2,2,13,1,14,1,2,1,1,1,2,1,2,1,1,7,1,12,2,2,1,2,2,2,2,2,2,13,1,2,2,1,1,1,2,2,2,8,2,2,2,1,1,4,2,2,2,1,2,4,2,2,2,2,2,2,1,2,2,1,1,2,2,5,2,2,2,2,1,2,2,2,1,2,1,11,2,2,2,2,1,2,1,1,1,2,8,2,1,5,1,2,2,1,2,2,1,1,2,2,2,2,2,2,2,2,2,1,2,2,2,7,1,1,1,1,2,2,2,1,2,2,2,1,1,2,2,2,1,2,2,12,2,2,1,2,2,2,2,2,1,2,2,1,8,2,1,2,2,2,2,1,2,1,1,2,2,10,2,5,10,8,10,1,1,10,1,2,1,2,5,9,1,1,2,2,2,1,9,2,9,2,2,1,2,1,2,1,2,12,2,2,1,1,2,2,1,1,2,2,2,2,2,2,12,2,1,1,1,1,1,2,2,2,2,2,1,2,1,2,2,2,2,13,1,2,10,1,2,1,2,2,8,2,2,2,1,2,2,2,2,1,2,2,2,2,2,1,6,1,3,2,2,2,2,2,2,2,2,1,6,2,1,1,2,2,7,1,2,2,2,13,2,2,2,2,2,1,2,2,10,2,1,2,2,1,2,2,1,8,2,9,2,2,2,2,8,10,9,2,2,2,2,2,2,2,1,1,7,1,2,1,7,7,12,1,1,2,2,1,2,2,1,2,1,1,1,2,1,9,2,1,2,2,2,2,1,2,2,2,1,2,9,1,1,2,1,1,1,3,1,1,2,2,2,2,2,2,1,1,1,1,2,2,2,7,1,1,1,12,1,2,2,9,2,2,6,1,2,2,2,2,1,1,2,2,2,2,1,2,2,2,2,2,2,1,2,2,2,1,2,2,2,1,1,9,1,2,2,1,2,2,2,2,2,1,2,1,2,2,1,2,1,1,1,8,2,2,1,2,2,1,1,2,12,2,2,2,9,1,2,1,2,1,2,2,1,12,1,13,13,2,1,11,1,2,1,1,13,2,2,2,2,2,2,10,13,2,2,1,2,2,1,2,7,2,2,1,7,2,2,2,1,11,2,1,4,1,1,2,2,2,1,1,2,2,2,1,2,2,11,8,2,9,2,2,7,2,1,5,2,2,2,2,5,2,2,2,1,2,1,6,2,1,1,2,2,2,2,13,2,2,2,2,2,2,12,12,1,2,1,2,2,2,1,2,2,2,2,12,13,8,8,2,2,2,2,2,2,1,2,1,2,13,1,10,1,1,2,4,2,13,2,2,2,1,2,2,2,2,9,13,2,2,9,7,9,2,2,2,13,2,2,2,1,2,2,2,1,2,7,1,2,2,2,1,2,2,1,2,2,1,2,2,6,1,1,1,1,1,2,2,1,5,1,2,1,1,2,2,1,2,1,2,2,2,1,1,1,2,2,2,8,2,2,6,2,2,1,1,1,2,2,1,2,1,2,1,1,2,14,1,2,2,1,1,9,2,1,2,1,1,2,1,10,1,1,1,2,11,1,2,1,2,1,2,2,2,1,2,13,2,2,1,2,2,2,2,2,2,2,1,2,2,1,1,2,1,1,2,2,2,2,2,2,2,2,1,3,9,1,2,6,2,2,1,2,2,12,1,2,13,8,11,2,1,2,2,2,12,2,2,1,6,7,2,2,2,2,2,2,2,2,1,1,2,2,1,2,2,1,2,1,2,2,2,11,2,2,2,1,1,12,1,1,2,2,1,4,2,2,1,2,2,2,2,4,1,2,2,2,2,6,1,8,10,2,2,1,2,2,4,5,7,1,2,1,1,1,2,1,1,2,1,2,2,2,2,2,1,5,12,2,2,2,2,1,1,2,8,2,2,2,2,2,2,11,9,2,1,5,7,2,2,2,9,2,2,2,1,1,1,1,1,1,2,10,2,2,2,2,2,2,8,1,2,2,11,6,12,2,1,2,1,1,2,1,2,2,12,2,1,9,2,1,11,2,2,2,2,1,2,1,9,2,1,1,2,2,1,2,2,1,2,2,1,2,2,2,1,2,2,1,2,2,2,2,2,2,
2,1,6,1,2,2,1,1,13,2,2,2,1,5,1,11,2,2,2,2,2,2,1,2,2,1,1,1,1,5,13,2,1,5,1,2,1,2,2,2,9,2,2,2,1,1,1,2,1,2,1,2,2,2,2,6,6,1,2,9,1,1,2,4,2,1,2,2,1,9,1,2,2,2,1,2,2,2,1,2,13,5,1,1,11,1,1,1,2,2,1,2,1,2,2,1,10,1,2,2,2,2,2,2,2,2,2,8,9,4,2,1,1,2,2,2,1,1,1,7,2,2,2,1,2,2,2,2,1,2,1,1,4,1,2,2,1,2,11,2,1,2,2,1,2,11,1,1,2,2,1,1,2,1,1,1,2,2,2,1,2,1,2,2,1,7,2,2,1,2,2,2,2,2,1,2,2,1,1,2,1,1,2,2,2,2,2,1,12,2,1,1,2,2,1,2,2,1,1,2,1,13,1,2,2,2,1,1,1,2,1,2,2,10,2,2,1,5,1,8,1,2,2,1,2,2,8,1,2,1,2,2,3,1,2,2,2,13,2,5,2,2,2,2,2,2,2,2,9,1,1,2,10,2,2,1,1,2,1,2,2,11,2,2,1,1,1,2,2,9,2,5,1,2,1,1,2,2,2,2,1,2,1,2,2,2,2,2,2,2,1,2,2,1,1,1,2,1,2,2,1,2,2,2,2,2,2,2,4,2,1,2,2,8,1,1,2,11,11,1,1,1,1,2,2,2,2,1,1,2,2,1,1,1,2,2,2,2,1,1,2,2,1,2,1,1,2,2,2,1,1,2,2,10,2,1,2,1,1,2,2,1,1,6,2,8,2,2,2,9,2,2,1,1,1,1,9,1,2,2,2,2,1,1,2,2,1,11,2,6,2,2,1,2,1,11,2,1,2,1,1,2,1,9,2,1,2,2,1,1,1,2,8,2,2,1,4,1,2,1,1,2,1,1,2,2,2,1,1,1,1,1,1,2,2,1,2,2,2,2,1,2,1,2,2,2,2,2,2,2,1,13,11,2,2,2,13,1,2,2,9,1,1,2,2,2,2,2,14,2,2,1,2,2,2,1,4,1,1,8,2,9,12,2,8,2,2,2,1,10,4,1,6,2,2,12,1,2,2,5,2,2,2,10,5,1,2,2,1,2,1,2,11,1,1,2,2,1,2,12,2,13,1,2,2,2,10,1,2,1,2,2,11,2,2,2,1,1,8,2,2,1,2,1,2,2,2,2,5,2,2,9,1,2,2,2,1,1,2,2,2,2,2,2,6,1,2,2,1,2,1,2,5,1,1,2,1,2,1,2,2,2,2,1,1,2,2,2,2,1,2,1,1,2,2,2,2,1,2,1,1,1,7,2,1,2,2,1,2,1,2,2,2,1,6,1,1,1,1,5,2,5,2,1,1,2,2,2,2,2,2,1,2,12,2,2,2,2,2,2,2,1,1,2,1,2,1,2,1,2,2,2,2,2,2,2,12,5,1,2,2,1,1,1,2,11,2,1,2,14,2,2,9,2,2,1,2,7,2,4,2,1,2,1,2,2,2,2,2,1,8,2,1,9,1,2,1,2,2,2,12,1,1,1,2,9,1,2,2,4,2,1,2,2,2,1,4,2,2,1,2,2,2,2,2,1,2,2,8,1,2,1,2,1,1,1,2,2,2,2,1,2,9,2,1,1,2,2,2,13,2,11,2,4,7,1,2,10,1,1,2,10,8,2,8,2,1,2,1,2,2,2,2,1,1,2,1,2,13,2,1,7,2,2,1,1,2,4,1,12,2,5,2,2,2,2,2,2,2,2,2,2,2,13,1,1,7,11,2,7,1,1,1,2,2,1,1,2,1,1,2,2,1,9,1,1,7,2,2,2,1,2,2,2,2,8,1,2,2];for(let b=0;b<m.length;b++)1==m[b]?m[b]=c.cf.bind(c).call.bind(c.cf.bind(c),c.cf.bind(c)):2==m[b]?m[b]=c.sf.bind(c).call.bind(c.sf.bind(c),c.sf.bind(c)):3==m[b]?m[b]=c.Cf.bind(c).call.bind(c.Cf.bind(c),c.Cf.bind(c)):m[b]=c.cF.bind(c).call.bind(c.cF.bind(c),c.cF.bind(c));var 
x=m[0],E=m[1],I=m[2],R=m[3],F=m[4],j=m[5],C=m[6],k=m[7],L=m[8],T=m[9],S=m[10],B=m[11],q=m[12],A=m[13],G=m[14],O=m[15],N=m[16],D=m[17],$=m[18],Q=m[19],U=m[20],Y=m[21],H=m[22],K=m[23],W=m[24],J=m[25],V=m[26],Z=m[27],X=m[28],b2=m[29],l2=m[30],c2=m[31],i2=m[32],e2=m[33],o2=m[34],a2=m[35],n2=m[36],h2=m[37],f2=m[38],t2=m[39],r2=m[40],s2=m[41],_2=m[42],u2=m[43],v2=m[44],d2=m[45],w2=m[46],y2=m[47],P2=m[48],p2=m[49],g2=m[50],z2=m[51],m2=m[52],x2=m[53],E2=m[54],I2=m[55],R2=m[56],F2=m[57],j2=m[58],C2=m[59],k2=m[60],L2=m[61],T2=m[62],S2=m[63],M2=m[64],B2=m[65],q2=m[66],A2=m[67],G2=m[68],O2=m[69],N2=m[70],D2=m[71],$2=m[72],Q2=m[73],U2=m[74],Y2=m[75],H2=m[76],K2=m[77],W2=m[78],J2=m[79],V2=m[80],Z2=m[81],X2=m[83],b1=m[84],l1=m[85],c1=m[86],i1=m[87],e1=m[88],o1=m[89],a1=m[90],n1=m[91],h1=m[92],f1=m[93],t1=m[94],r1=m[95],s1=m[96],_1=m[97],u1=m[98],v1=m[99],d1=m[100],w1=m[101],y1=m[102],P1=m[103],p1=m[104],g1=m[105],z1=m[106],m1=m[107],x1=m[108],E1=m[109],I1=m[110],R1=m[111],F1=m[112],j1=m[113],C1=m[114],k1=m[115],L1=m[116],T1=m[117],S1=m[118],M1=m[119],B1=m[120],q1=m[121],A1=m[122],G1=m[123],O1=m[124],N1=m[125],D1=m[126],$1=m[127],Q1=m[128],U1=m[129],Y1=m[130],H1=m[131],K1=m[132],W1=m[133],J1=m[134],V1=m[135],Z1=m[136],X1=m[137],b5=m[138],l5=m[139],c5=m[140],i5=m[141],e5=m[142],o5=m[143],a5=m[144],n5=m[145],h5=m[146],f5=m[147],t5=m[148],r5=m[149],s5=m[150],_5=m[151],u5=m[152],v5=m[153],d5=m[154],w5=m[155],y5=m[156],P5=m[157],p5=m[158],g5=m[159],z5=m[160],m5=m[161],x5=m[162],E5=m[163],I5=m[164],R5=m[165],F5=m[166],j5=m[167],C5=m[168],k5=m[169],L5=m[170],T5=m[171],S5=m[172],M5=m[173],B5=m[174],q5=m[175],A5=m[176],G5=m[177],O5=m[178],N5=m[179],D5=m[180],$5=m[181],Q5=m[182],U5=m[183],Y5=m[184],H5=m[185],K5=m[186],W5=m[187],J5=m[188],V5=m[189],Z5=m[190],X5=m[191],b7=m[192],l7=m[193],c7=m[194],i7=m[195],e7=m[196],o7=m[197],a7=m[198],n7=m[199],h7=m[200],f7=m[201],t7=m[202],r7=m[203],s7=m[204],_7=m[205],u7=m[206],v7=m[207],d7=m[208],w7=m[209],y7=m[210],P7=m[211],p7=m[212],g7=m[213],z7=m[214
],m7=m[215],x7=m[216],E7=m[217],I7=m[218],R7=m[219],F7=m[220],j7=m[221],C7=m[222],k7=m[223],L7=m[224],T7=m[225],S7=m[226],M7=m[227],B7=m[228],q7=m[229],A7=m[230],G7=m[231],O7=m[232],N7=m[233],D7=m[234],$7=m[235],Q7=m[236],U7=m[237],Y7=m[238],H7=m[239],K7=m[240],W7=m[241],J7=m[242],V7=m[243],Z7=m[244],X7=m[245],b3=m[246],l3=m[247],c3=m[248],i3=m[249],e3=m[250],o3=m[251],a3=m[252],n3=m[253],h3=m[254],f3=m[255],t3=m[256],r3=m[257],s3=m[258],_3=m[259],u3=m[260],v3=m[261],d3=m[262],w3=m[263],y3=m[264],P3=m[265],p3=m[266],g3=m[267],z3=m[268],m3=m[269],x3=m[270],E3=m[271],I3=m[272],R3=m[273],F3=m[274],j3=m[275],C3=m[276],k3=m[277],L3=m[278],T3=m[279],S3=m[280],M3=m[281],B3=m[282],q3=m[283],A3=m[284],G3=m[285],O3=m[286],N3=m[287],D3=m[288],$3=m[289],Q3=m[290],U3=m[291],Y3=m[292],H3=m[293],K3=m[294],W3=m[295],J3=m[296],V3=m[297],Z3=m[298],X3=m[299],b4=m[300],l4=m[301],c4=m[302],i4=m[303],e4=m[304],o4=m[305],a4=m[306],n4=m[307],h4=m[308],f4=m[309],t4=m[310],r4=m[311],s4=m[312],_4=m[313],u4=m[314],v4=m[315],d4=m[316],w4=m[317],y4=m[318],P4=m[319],p4=m[320],g4=m[321],z4=m[322],m4=m[323],x4=m[324],E4=m[325],I4=m[326],R4=m[327],F4=m[328],j4=m[329],C4=m[330],k4=m[331],L4=m[332],T4=m[333],S4=m[334],M4=m[335],B4=m[336],q4=m[337],A4=m[338],G4=m[339],O4=m[340],N4=m[341],D4=m[342],$4=m[343],Q4=m[344],U4=m[345],Y4=m[346],H4=m[347],K4=m[348],W4=m[349],J4=m[350],V4=m[351],Z4=m[352],X4=m[353],b6=m[354],l6=m[355],c6=m[356],i6=m[357],e6=m[358],o6=m[359],a6=m[360],n6=m[361],h6=m[362],f6=m[363],t6=m[364],r6=m[365],s6=m[366],_6=m[367],u6=m[368],v6=m[369],d6=m[370],w6=m[371],y6=m[372],P6=m[373],p6=m[374],g6=m[375],z6=m[376],m6=m[377],x6=m[378],E6=m[379],I6=m[380],R6=m[381],F6=m[382],j6=m[383],C6=m[384],k6=m[385],L6=m[386],T6=m[387],S6=m[388],M6=m[389],B6=m[390],q6=m[391],A6=m[392],G6=m[393],O6=m[394],N6=m[395],D6=m[396],$6=m[397],Q6=m[398],U6=m[399],Y6=m[400],H6=m[401],K6=m[402],W6=m[403],J6=m[404],V6=m[405],Z6=m[406],X6=m[407],b0=m[408],l0=m[409],c0=m[410],i0=m[411],e0=m[412],o0=m[413],a0=m[414
],n0=m[415],h0=m[416],f0=m[417],t0=m[418],r0=m[419],s0=m[420],_0=m[421],u0=m[422],v0=m[423],d0=m[424],w0=m[425],y0=m[426],P0=m[427],p0=m[428],g0=m[429],z0=m[430],m0=m[431],x0=m[432],E0=m[433],I0=m[434],R0=m[435],F0=m[437],j0=m[438],C0=m[439],k0=m[440],L0=m[441],T0=m[442],S0=m[443],M0=m[444],B0=m[445],q0=m[446],A0=m[447],G0=m[448],O0=m[449],N0=m[450],D0=m[451],$0=m[452],Q0=m[453],U0=m[454],Y0=m[455],H0=m[456],K0=m[457],W0=m[458],J0=m[459],V0=m[460],Z0=m[461],X0=m[462],b8=m[463],l8=m[464],c8=m[465],i8=m[466],e8=m[467],o8=m[468],a8=m[469],n8=m[470],h8=m[471],f8=m[472],t8=m[473],r8=m[474],s8=m[475],_8=m[476],u8=m[477],v8=m[478],d8=m[479],w8=m[480],y8=m[481],P8=m[482],p8=m[483],g8=m[484],z8=m[485],m8=m[486],x8=m[487],E8=m[488],I8=m[489],R8=m[490],F8=m[491],j8=m[492],C8=m[493],k8=m[494],L8=m[495],T8=m[496],S8=m[497],M8=m[498],B8=m[499],q8=m[500],A8=m[501],G8=m[502],O8=m[503],N8=m[504],D8=m[505],$8=m[506],Q8=m[507],U8=m[508],Y8=m[509],H8=m[510],K8=m[511],W8=m[512],J8=m[513],V8=m[514],Z8=m[515],X8=m[516],b9=m[517],l9=m[518],c9=m[519],i9=m[520],e9=m[521],o9=m[522],a9=m[523],n9=m[524],h9=m[525],f9=m[526],t9=m[527],r9=m[528],s9=m[529],_9=m[530],u9=m[531],v9=m[532],d9=m[533],w9=m[534],y9=m[535],P9=m[536],p9=m[537],g9=m[538],z9=m[539],m9=m[540],x9=m[541],E9=m[542],I9=m[543],R9=m[544],F9=m[545],j9=m[546],C9=m[547],k9=m[548],L9=m[549],T9=m[550],S9=m[551],M9=m[552],B9=m[553],q9=m[554],A9=m[555],G9=m[556],O9=m[557],N9=m[558],D9=m[559],$9=m[560],Q9=m[561],U9=m[562],Y9=m[563],H9=m[564],K9=m[565],W9=m[566],J9=m[567],V9=m[568],Z9=m[569],X9=m[570],bb=m[571],lb=m[572],cb=m[573],ib=m[574],eb=m[575],ob=m[576],ab=m[577],nb=m[578],hb=m[579],fb=m[580],tb=m[581],rb=m[582],sb=m[583],_b=m[584],ub=m[585],vb=m[586],db=m[587],wb=m[588],yb=m[589],Pb=m[590],pb=m[591],gb=m[592],zb=m[593],mb=m[594],xb=m[595],Eb=m[596],Ib=m[597],Rb=m[598],Fb=m[599],jb=m[600],Cb=m[601],kb=m[602],Lb=m[603],Tb=m[604],Sb=m[605],Mb=m[606],Bb=m[607],qb=m[608],Ab=m[609],Gb=m[610],Ob=m[611],Nb=m[612],Db=m[613],$b=m[614],Qb=m[615
],Ub=m[616],Yb=m[617],Hb=m[618],Kb=m[619],Wb=m[620],Jb=m[621],Vb=m[622],Zb=m[623],Xb=m[624],bl=m[625],ll=m[626],cl=m[627],il=m[628],el=m[629],ol=m[630],al=m[631],nl=m[632],hl=m[633],fl=m[634],tl=m[635],rl=m[636],sl=m[637],_l=m[638],ul=m[639],vl=m[640],dl=m[641],wl=m[642],yl=m[643],Pl=m[644],pl=m[645],gl=m[646],zl=m[647],ml=m[648],xl=m[649],El=m[650],Il=m[651],Rl=m[652],Fl=m[653],jl=m[654],Cl=m[655],kl=m[656],Ll=m[657],Tl=m[658],Sl=m[659],Ml=m[660],Bl=m[661],ql=m[662],Al=m[663],Gl=m[664],Ol=m[665],Nl=m[666],Dl=m[667],$l=m[668],Ql=m[669],Ul=m[670],Yl=m[671],Hl=m[672],Kl=m[673],Wl=m[674],Jl=m[675],Vl=m[676],Zl=m[677],Xl=m[678],bc=m[679],lc=m[680],cc=m[681],ic=m[682],ec=m[683],oc=m[684],ac=m[685],nc=m[686],hc=m[687],fc=m[688],tc=m[689],rc=m[690],sc=m[691],_c=m[692],uc=m[693],vc=m[694],dc=m[695],wc=m[696],yc=m[697],Pc=m[698],pc=m[699],gc=m[700],zc=m[701],mc=m[702],xc=m[703],Ec=m[704],Ic=m[705],Rc=m[706],Fc=m[707],jc=m[708],Cc=m[709],kc=m[710],Lc=m[711],Tc=m[712],Sc=m[713],Mc=m[714],Bc=m[715],qc=m[716],Ac=m[717],Gc=m[718],Oc=m[719],Nc=m[720],Dc=m[721],$c=m[722],Qc=m[723],Uc=m[724],Yc=m[725],Hc=m[726],Kc=m[727],Wc=m[728],Jc=m[729],Vc=m[730],Zc=m[731],Xc=m[732],bi=m[733],li=m[734],ci=m[735],ii=m[736],ei=m[737],oi=m[738],ai=m[739],ni=m[740],hi=m[741],fi=m[742],ti=m[743],ri=m[744],si=m[745],_i=m[746],ui=m[747],vi=m[748],di=m[749],wi=m[750],yi=m[751],Pi=m[752],pi=m[753],gi=m[754],zi=m[755],mi=m[756],xi=m[757],Ei=m[758],Ii=m[759],Ri=m[760],Fi=m[761],ji=m[762],Ci=m[763],ki=m[764],Li=m[765],Ti=m[766],Si=m[767],Mi=m[768],Bi=m[769],qi=m[770],Ai=m[771],Gi=m[772],Oi=m[773],Ni=m[774],Di=m[775],$i=m[776],Qi=m[777],Ui=m[778],Yi=m[779],Hi=m[780],Ki=m[781],Wi=m[782],Ji=m[783],Vi=m[784],Zi=m[785],Xi=m[786],be=m[787],le=m[788],ce=m[789],ie=m[790],ee=m[791],oe=m[792],ae=m[793],ne=m[794],he=m[795],fe=m[796],te=m[797],re=m[798],se=m[799],_e=m[800],ue=m[801],ve=m[802],de=m[803],we=m[804],ye=m[805],Pe=m[806],pe=m[807],ge=m[808],ze=m[809],me=m[810],xe=m[811],Ee=m[812],Ie=m[813],Re=m[814],Fe=m[815
],je=m[816],Ce=m[817],ke=m[818],Le=m[819],Te=m[820],Se=m[821],Me=m[822],Be=m[823],qe=m[824],Ae=m[825],Ge=m[826],Oe=m[827],Ne=m[828],De=m[829],$e=m[830],Qe=m[831],Ue=m[832],Ye=m[833],He=m[834],Ke=m[835],We=m[836],Je=m[837],Ve=m[838],Ze=m[839],Xe=m[840],bo=m[841],lo=m[842],co=m[843],io=m[844],eo=m[845],oo=m[847],ao=m[848],no=m[849],ho=m[850],fo=m[851],to=m[852],ro=m[853],so=m[854],_o=m[855],uo=m[856],vo=m[857],wo=m[858],yo=m[859],Po=m[860],po=m[861],go=m[862],zo=m[863],mo=m[864],xo=m[865],Eo=m[866],Io=m[867],Ro=m[868],Fo=m[869],jo=m[870],Co=m[871],ko=m[872],Lo=m[873],To=m[874],So=m[875],Mo=m[876],Bo=m[877],qo=m[878],Ao=m[879],Go=m[880],Oo=m[881],No=m[882],Do=m[883],$o=m[884],Qo=m[885],Uo=m[886],Yo=m[887],Ho=m[888],Ko=m[889],Wo=m[890],Jo=m[891],Vo=m[892],Zo=m[893],Xo=m[894],ba=m[895],la=m[896],ca=m[897],ia=m[898],ea=m[899],oa=m[900],aa=m[901],na=m[902],ha=m[903],fa=m[904],ta=m[905],ra=m[906],sa=m[907],_a=m[908],ua=m[909],va=m[910],da=m[911],wa=m[912],ya=m[913],Pa=m[914],pa=m[915],ga=m[916],za=m[917],ma=m[918],xa=m[919],Ea=m[920],Ia=m[922],Ra=m[923],Fa=m[924],ja=m[925],Ca=m[926],ka=m[927],La=m[928],Ta=m[929],Sa=m[930],Ma=m[931],Ba=m[932],qa=m[933],Aa=m[934],Ga=m[935],Oa=m[936],Na=m[937],Da=m[938],$a=m[939],Qa=m[940],Ua=m[941],Ya=m[942],Ha=m[943],Ka=m[944],Wa=m[945],Ja=m[946],Va=m[947],Za=m[948],Xa=m[949],bn=m[950],ln=m[951],cn=m[952],en=m[953],on=m[954],an=m[955],nn=m[956],hn=m[957],fn=m[958],tn=m[959],rn=m[960],sn=m[961],_n=m[962],un=m[963],vn=m[964],dn=m[965],wn=m[966],yn=m[967],Pn=m[968],pn=m[969],gn=m[970],zn=m[971],mn=m[972],xn=m[973],En=m[974],In=m[975],Rn=m[976],Fn=m[977],jn=m[978],Cn=m[979],kn=m[980],Ln=m[981],Tn=m[982],Sn=m[983],Mn=m[984],Bn=m[985],qn=m[986],An=m[987],Gn=m[988],On=m[989],Nn=m[990],Dn=m[991],$n=m[992],Qn=m[993],Un=m[994],Yn=m[995],Hn=m[996],Kn=m[997],Wn=m[998],Jn=m[999],Vn=m[1e3],Zn=m[1001],Xn=m[1002],bh=m[1003],lh=m[1004],ch=m[1005],ih=m[1006],eh=m[1007],oh=m[1008],ah=m[1009],nh=m[1010],hh=m[1011],fh=m[1012],th=m[1013],rh=m[1014],sh=m[1015],_h=
m[1016],uh=m[1017],vh=m[1018],dh=m[1019],wh=m[1020],yh=m[1021],Ph=m[1022],ph=m[1023],gh=m[1024],zh=m[1025],mh=m[1026],xh=m[1027],Eh=m[1028],Ih=m[1029],Rh=m[1030],Fh=m[1031],jh=m[1032],Ch=m[1033],kh=m[1034],Lh=m[1035],Th=m[1036],Sh=m[1037],Mh=m[1038],Bh=m[1039],qh=m[1040],Ah=m[1041],Gh=m[1042],Oh=m[1043],Nh=m[1044],Dh=m[1045],$h=m[1046],Qh=m[1047],Uh=m[1048],Yh=m[1049],Hh=m[1050],Kh=m[1051],Wh=m[1052],Jh=m[1053],Vh=m[1054],Zh=m[1055],Xh=m[1056],bf=m[1057],lf=m[1058],cf=m[1059],ef=m[1060],of=m[1061],af=m[1062],nf=m[1063],hf=m[1064],ff=m[1065],tf=m[1066],rf=m[1067],sf=m[1068],_f=m[1069],uf=m[1070],vf=m[1071],df=m[1072],wf=m[1073],yf=m[1074],Pf=m[1075],pf=m[1076],gf=m[1077],zf=m[1078],mf=m[1079],xf=m[1080],Ef=m[1081],If=m[1082],Rf=m[1083],Ff=m[1084],jf=m[1085],Cf=m[1086],kf=m[1087],Lf=m[1088],Tf=m[1089],Sf=m[1090],Mf=m[1091],Bf=m[1092],qf=m[1093],Af=m[1094],Gf=m[1095],Of=m[1096],Nf=m[1097],Df=m[1098],$f=m[1099],Qf=m[1100],Uf=m[1101],Yf=m[1102],Hf=m[1103],Kf=m[1104],Wf=m[1105],Jf=m[1106],Vf=m[1107],Zf=m[1108],Xf=m[1109],bt=m[1110],lt=m[1111],ct=m[1112],it=m[1113],et=m[1114],ot=m[1115],at=m[1116],nt=m[1117],ht=m[1118],ft=m[1119],tt=m[1120],rt=m[1121],st=m[1122],_t=m[1123],ut=m[1124],vt=m[1125],dt=m[1126],wt=m[1127],yt=m[1128],Pt=m[1129],pt=m[1130],gt=m[1131],zt=m[1132],mt=m[1133],xt=m[1134],Et=m[1135],It=m[1136],Rt=m[1137],Ft=m[1138],jt=m[1139],Ct=m[1140],kt=m[1141],Lt=m[1142],Tt=m[1143],St=m[1144],Mt=m[1145],Bt=m[1146],qt=m[1147],At=m[1148],Gt=m[1149],Ot=m[1150],Nt=m[1151],Dt=m[1152],$t=m[1153],Qt=m[1154],Ut=m[1155],Yt=m[1156],Ht=m[1157],Kt=m[1158],Wt=m[1159],Jt=m[1160],Vt=m[1161],Zt=m[1162],Xt=m[1163],br=m[1164],lr=m[1165],cr=m[1166],ir=m[1167],er=m[1168],or=m[1169],ar=m[1170],nr=m[1171],hr=m[1172],fr=m[1173],tr=m[1174],rr=m[1175],sr=m[1176],_r=m[1177],ur=m[1178],vr=m[1179],dr=m[1180],wr=m[1181],yr=m[1182],Pr=m[1183],pr=m[1184],gr=m[1185],zr=m[1186],mr=m[1187],xr=m[1188],Er=m[1189],Ir=m[1190],Rr=m[1191],Fr=m[1192],jr=m[1193],Cr=m[1194],kr=m[1195],Lr=m[1196],Tr=m[1197],S
r=m[1198],Mr=m[1199],Br=m[1200],qr=m[1201],Ar=m[1202],Gr=m[1203],Or=m[1204],Nr=m[1205],Dr=m[1206],$r=m[1207],Qr=m[1208],Ur=m[1209],Yr=m[1210],Hr=m[1211],Kr=m[1212],Wr=m[1213],Jr=m[1214],Vr=m[1215],Zr=m[1216],Xr=m[1217],bs=m[1218],ls=m[1219],cs=m[1220],is=m[1221],es=m[1222],os=m[1223],as=m[1224],ns=m[1225],hs=m[1226],fs=m[1227],ts=m[1228],rs=m[1229],ss=m[1230],_s=m[1231],us=m[1232],vs=m[1233],ds=m[1234],ws=m[1235],ys=m[1236],Ps=m[1237],ps=m[1238],gs=m[1239],zs=m[1240],ms=m[1241],xs=m[1242],Es=m[1243],Is=m[1244],Rs=m[1245],Fs=m[1246],js=m[1247],Cs=m[1248],ks=m[1249],Ls=m[1250],Ts=m[1251],Ss=m[1252],Ms=m[1253],Bs=m[1254],qs=m[1255],As=m[1256],Gs=m[1257],Os=m[1258],Ns=m[1259],Ds=m[1260],$s=m[1261],Qs=m[1262],Us=m[1263],Ys=m[1264],Hs=m[1265],Ks=m[1266],Ws=m[1267],Js=m[1268],Vs=m[1269],Zs=m[1270],Xs=m[1271],b_=m[1272],l_=m[1273],c_=m[1274],i_=m[1275],e_=m[1276],o_=m[1277],a_=m[1278],n_=m[1279],h_=m[1280],f_=m[1281],t_=m[1282],r_=m[1283],s_=m[1284],__=m[1285],u_=m[1286],v_=m[1287],d_=m[1288],w_=m[1289],y_=m[1290],P_=m[1291],p_=m[1292],g_=m[1293],z_=m[1294],m_=m[1295],x_=m[1296],E_=m[1297],I_=m[1298],R_=m[1299],F_=m[1300],j_=m[1301],C_=m[1302],k_=m[1303],L_=m[1304],T_=m[1305],S_=m[1306],M_=m[1307],B_=m[1308],q_=m[1309],A_=m[1310],G_=m[1311],O_=m[1312],N_=m[1313],D_=m[1314],$_=m[1315],Q_=m[1316],U_=m[1317],Y_=m[1318],H_=m[1319],K_=m[1320],W_=m[1321],J_=m[1322],V_=m[1323],Z_=m[1324],X_=m[1325],bu=m[1326],lu=m[1327],cu=m[1328],iu=m[1329],eu=m[1330],ou=m[1331],au=m[1332],nu=m[1333],hu=m[1334],fu=m[1335],tu=m[1336],ru=m[1337],su=m[1338],_u=m[1339],uu=m[1340],vu=m[1341],du=m[1342],wu=m[1343],yu=m[1344],Pu=m[1345],pu=m[1346],gu=m[1347],zu=m[1348],mu=m[1349],xu=m[1350],Eu=m[1351],Iu=m[1352],Ru=m[1353],Fu=m[1354],ju=m[1355],Cu=m[1356],ku=m[1357],Lu=m[1358],Tu=m[1359],Su=m[1360],Mu=m[1361],Bu=m[1362],qu=m[1363],Au=m[1364],Gu=m[1365],Ou=m[1366],Nu=m[1367],Du=m[1368],$u=m[1369],Qu=m[1370],Uu=m[1371],Yu=m[1372],Hu=m[1373],Ku=m[1374],Wu=m[1375],Ju=m[1376],Vu=m[1377],Zu=m[1378],Xu=m[1379]
,bv=m[1380],lv=m[1381],cv=m[1382],iv=m[1383],ev=m[1384],ov=m[1385],av=m[1386],nv=m[1387],hv=m[1388],fv=m[1389],tv=m[1390],rv=m[1391],sv=m[1392],_v=m[1393],uv=m[1394],vv=m[1395],dv=m[1396],wv=m[1397],yv=m[1398],Pv=m[1399],pv=m[1400],gv=m[1401],zv=m[1402],mv=m[1403],xv=m[1404],Ev=m[1405],Iv=m[1406],Rv=m[1407],Fv=m[1408],jv=m[1409],Cv=m[1410],kv=m[1411],Lv=m[1412],Tv=m[1413],Sv=m[1414],Mv=m[1415],Bv=m[1416],qv=m[1417],Av=m[1418],Gv=m[1419],Ov=m[1420],Nv=m[1421],Dv=m[1422],$v=m[1423],Qv=m[1424],Uv=m[1425],Yv=m[1426],Hv=m[1427],Kv=m[1428],Wv=m[1429],Jv=m[1430],Vv=m[1431],Zv=m[1432],Xv=m[1433],bd=m[1434],ld=m[1435],cd=m[1436],id=m[1437],ed=m[1438],od=m[1439],ad=m[1440],nd=m[1441],hd=m[1442],fd=m[1443],td=m[1444],rd=m[1445],sd=m[1446],_d=m[1447],ud=m[1448],vd=m[1449],dd=m[1450],wd=m[1451],yd=m[1452],Pd=m[1453],pd=m[1454],gd=m[1455],zd=m[1456],md=m[1457],xd=m[1458],Ed=m[1459],Id=m[1460],Rd=m[1461],Fd=m[1462],jd=m[1463],Cd=m[1464],kd=m[1465],Ld=m[1466],Td=m[1467],Sd=m[1468],Md=m[1469],Bd=m[1470],qd=m[1471],Ad=m[1472],Gd=m[1473],Od=m[1474],Nd=m[1475],Dd=m[1476],$d=m[1477],Qd=m[1478],Ud=m[1479],Yd=m[1480],Hd=m[1481],Kd=m[1482],Wd=m[1483],Jd=m[1484],Vd=m[1485],Zd=m[1486],Xd=m[1487],bw=m[1488],lw=m[1489],cw=m[1490],iw=m[1491],ew=m[1492],ow=m[1493],aw=m[1494],nw=m[1495],hw=m[1496],fw=m[1497],tw=m[1498],rw=m[1499],sw=m[1500],_w=m[1501],uw=m[1502],vw=m[1503],dw=m[1504],ww=m[1505],yw=m[1506],Pw=m[1507],pw=m[1508],gw=m[1509],zw=m[1510],mw=m[1511],xw=m[1512],Ew=m[1513],Iw=m[1514],Rw=m[1515],Fw=m[1516],jw=m[1517],Cw=m[1518],kw=m[1519],Lw=m[1520],Tw=m[1521],Sw=m[1522],Mw=m[1523],Bw=m[1524],qw=m[1525],Aw=m[1526],Gw=m[1527],Ow=m[1528],Nw=m[1529],Dw=m[1530],$w=m[1531],Qw=m[1532],Uw=m[1533],Yw=m[1534],Hw=m[1535],Kw=m[1536],Ww=m[1537],Jw=m[1538],Vw=m[1539],Zw=m[1540],Xw=m[1541],by=m[1542],ly=m[1543],cy=m[1544],iy=m[1545],ey=m[1546],oy=m[1547],ay=m[1548],ny=m[1549],hy=m[1550],fy=m[1551],ty=m[1552],ry=m[1553],sy=m[1554],_y=m[1555],uy=m[1556],vy=m[1557],dy=m[1558],wy=m[1559],yy=m[1560],Py=m[156
1],py=m[1562],gy=m[1563],zy=m[1564],my=m[1565],xy=m[1566],Ey=m[1567],Iy=m[1568],Ry=m[1569],Fy=m[1570],jy=m[1571],Cy=m[1572],ky=m[1573],Ly=m[1574],Ty=m[1575],Sy=m[1576],My=m[1577],By=m[1578],qy=m[1579],Ay=m[1580],Gy=m[1581],Oy=m[1582],Ny=m[1583],Dy=m[1584],$y=m[1585],Qy=m[1586],Uy=m[1587],Yy=m[1588],Hy=m[1589],Ky=m[1590],Wy=m[1591],Jy=m[1592],Vy=m[1593],Zy=m[1594],Xy=m[1595],bP=m[1596],lP=m[1597],cP=m[1598],iP=m[1599],eP=m[1600],oP=m[1601],aP=m[1602],nP=m[1603],hP=m[1604],fP=m[1605],tP=m[1606],rP=m[1607],sP=m[1608],_P=m[1609],uP=m[1610],vP=m[1611],dP=m[1612],wP=m[1613],yP=m[1614],PP=m[1615],pP=m[1616],gP=m[1617],zP=m[1618],mP=m[1619],xP=m[1620],EP=m[1621],IP=m[1622],RP=m[1623],FP=m[1624],jP=m[1625],CP=m[1626],kP=m[1627],LP=m[1628],TP=m[1629],SP=m[1630],MP=m[1631],BP=m[1632],qP=m[1633],AP=m[1634],GP=m[1635],OP=m[1636],NP=m[1637],DP=m[1638],$P=m[1639],QP=m[1640],UP=m[1641],YP=m[1642],HP=m[1643],KP=m[1644],WP=m[1645],JP=m[1646],VP=m[1647],ZP=m[1648],XP=m[1649],bp=m[1650],lp=m[1651],cp=m[1652],ip=m[1653],ep=m[1654],op=m[1655],ap=m[1656],np=m[1657],hp=m[1658],fp=m[1659],tp=m[1660],rp=m[1661],sp=m[1662],_p=m[1663],up=m[1664],vp=m[1665],dp=m[1666],wp=m[1667],yp=m[1668],Pp=m[1669],pp=m[1670],gp=m[1671],zp=m[1672],mp=m[1673],xp=m[1674],Ep=m[1675],Ip=m[1676],Rp=m[1677],Fp=m[1678],jp=m[1679],Cp=m[1680],kp=m[1681],Lp=m[1682],Tp=m[1683],Sp=m[1684],Mp=m[1685],Bp=m[1686],qp=m[1687],Ap=m[1688],Gp=m[1689],Op=m[1690],Np=m[1691],Dp=m[1692],$p=m[1693],Qp=m[1694],Up=m[1695],Yp=m[1696],Hp=m[1697],Kp=m[1698],Wp=m[1699],Jp=m[1700],Vp=m[1701],Zp=m[1702],Xp=m[1703],bg=m[1704],lg=m[1705],cg=m[1706],ig=m[1707],eg=m[1708],og=m[1709],ag=m[1710],ng=m[1711],hg=m[1712],fg=m[1713],tg=m[1714],rg=m[1715],sg=m[1716],_g=m[1717],ug=m[1718],vg=m[1719],dg=m[1720],wg=m[1721],yg=m[1722],Pg=m[1723],pg=m[1724],gg=m[1725],zg=m[1726],mg=m[1727],xg=m[1728],Eg=m[1729],Ig=m[1730],Rg=m[1731],Fg=m[1732],jg=m[1733],Cg=m[1734],kg=m[1735],Lg=m[1736],Tg=m[1737],Sg=m[1738],Mg=m[1739],Bg=m[1740],qg=m[1741],Ag=m[1742],Gg=m[1
743],Og=m[1744],Ng=m[1745],Dg=m[1746],$g=m[1747],Qg=m[1748],Ug=m[1749],Yg=m[1750],Hg=m[1751],Kg=m[1752],Wg=m[1753],Jg=m[1754],Vg=m[1755],Zg=m[1756],Xg=m[1757],bz=m[1758],lz=m[1759],cz=m[1760],iz=m[1761],ez=m[1762],oz=m[1763],az=m[1764],nz=m[1765],hz=m[1766],fz=m[1767],tz=m[1768],rz=m[1769],sz=m[1770],_z=m[1771],uz=m[1772],vz=m[1773],dz=m[1774],wz=m[1775],yz=m[1776],Pz=m[1777],pz=m[1778],gz=m[1779],zz=m[1780],mz=m[1781],xz=m[1782],Ez=m[1783],Iz=m[1784],Rz=m[1785],Fz=m[1786],jz=m[1787],Cz=m[1788],kz=m[1789],Lz=m[1790],Tz=m[1791],Sz=m[1792],Mz=m[1793],Bz=m[1794],qz=m[1795],Az=m[1796],Gz=m[1797],Oz=m[1798],Nz=m[1799],Dz=m[1800],$z=m[1801],Qz=m[1802],Uz=m[1803],Yz=m[1804],Hz=m[1805],Kz=m[1806],Wz=m[1807],Jz=m[1808],Vz=m[1809],Zz=m[1810],Xz=m[1811],bm=m[1812],lm=m[1813],cm=m[1814],im=m[1815],em=m[1816],om=m[1817],am=m[1818],nm=m[1819],hm=m[1820],fm=m[1821],tm=m[1822],rm=m[1823],sm=m[1824],_m=m[1825],um=m[1826],vm=m[1827],dm=m[1828],wm=m[1829],ym=m[1830],Pm=m[1831],pm=m[1832],gm=m[1833],zm=m[1834],mm=m[1835],xm=m[1836],Em=m[1837],Im=m[1838],Rm=m[1839],Fm=m[1840],jm=m[1841],Cm=m[1842],km=m[1843],Lm=m[1844],Tm=m[1845],Sm=m[1846],Mm=m[1847],Bm=m[1848],qm=m[1849],Am=m[1850],Gm=m[1851],Om=m[1852],Nm=m[1853],Dm=m[1854],$m=m[1855],Qm=m[1856],Um=m[1857],Ym=m[1858],Hm=m[1859],Km=m[1860],Wm=m[1861],Jm=m[1862],Vm=m[1863],Zm=m[1864],Xm=m[1865],bx=m[1866],lx=m[1867],cx=m[1868],ix=m[1869],ex=m[1870],ox=m[1871],ax=m[1872],nx=m[1873],hx=m[1874],fx=m[1875],tx=m[1876],rx=m[1877],sx=m[1878],_x=m[1879],ux=m[1880],vx=m[1881],dx=m[1882],wx=m[1883],yx=m[1884],Px=m[1885],px=m[1886],gx=m[1887],zx=m[1888],mx=m[1889],xx=m[1890],Ex=m[1891],Ix=m[1892],Rx=m[1893],Fx=m[1894],jx=m[1895],Cx=m[1896],kx=m[1897],Lx=m[1898],Tx=m[1899],Sx=m[1900],Mx=m[1901],Bx=m[1902],qx=m[1903],Ax=m[1904],Gx=m[1905],Ox=m[1906],Nx=m[1907],Dx=m[1908],$x=m[1909],Qx=m[1910],Ux=m[1911],Yx=m[1912],Hx=m[1913],Kx=m[1914],Wx=m[1915],Jx=m[1916],Vx=m[1917],Zx=m[1918],Xx=m[1919],bE=m[1920],lE=m[1921],cE=m[1922],iE=m[1923],eE=m[1924],oE=m
[1925],aE=m[1926],nE=m[1927],hE=m[1928],fE=m[1929],tE=m[1930],rE=m[1931],sE=m[1932],_E=m[1933],uE=m[1934],vE=m[1935],dE=m[1936],wE=m[1937],yE=m[1938],PE=m[1939],pE=m[1940],gE=m[1941],zE=m[1942],mE=m[1943],xE=m[1944],EE=m[1945],IE=m[1946],RE=m[1947],FE=m[1948],jE=m[1949],CE=m[1950],kE=m[1951],LE=m[1952],TE=m[1953],SE=m[1954],ME=m[1955],BE=m[1956],qE=m[1957],AE=m[1958],GE=m[1959],OE=m[1960],NE=m[1961],DE=m[1962],$E=m[1963],QE=m[1964],UE=m[1965],YE=m[1966],HE=m[1967],KE=m[1968],WE=m[1969],JE=m[1970],VE=m[1971],ZE=m[1972],XE=m[1973],bI=m[1974],lI=m[1975],cI=m[1976],iI=m[1977],eI=m[1978],oI=m[1979],aI=m[1980],nI=m[1981],hI=m[1982],fI=m[1983],tI=m[1984],rI=m[1985],sI=m[1986],_I=m[1987],uI=m[1988],vI=m[1989],dI=m[1990],wI=m[1991],yI=m[1992],PI=m[1993],pI=m[1994],gI=m[1995],zI=m[1996],mI=m[1997],xI=m[1998],EI=m[1999],II=m[2e3],RI=m[2001],FI=m[2002],jI=m[2003],CI=m[2004],kI=m[2005],LI=m[2006],TI=m[2007],SI=m[2008],MI=m[2009],BI=m[2010],qI=m[2011],AI=m[2012],GI=m[2013],OI=m[2014],NI=m[2015],DI=m[2016],$I=m[2017],QI=m[2018],UI=m[2019],YI=m[2020],HI=m[2021],KI=m[2022],WI=m[2023],JI=m[2024],VI=m[2025],ZI=m[2026],XI=m[2027],bR=m[2028],lR=m[2029],cR=m[2030],iR=m[2031],eR=m[2032],oR=m[2033],aR=m[2034],nR=m[2035],hR=m[2036],fR=m[2037],tR=m[2038],rR=m[2039],sR=m[2040],_R=m[2041],uR=m[2042],vR=m[2043],dR=m[2044],wR=m[2045],yR=m[2046],PR=m[2047],pR=m[2048],gR=m[2049],zR=m[2050],mR=m[2051],xR=m[2052],ER=m[2053],IR=m[2054],RR=m[2055],FR=m[2056],jR=m[2057],CR=m[2058],kR=m[2059],LR=m[2060],TR=m[2061],SR=m[2062],MR=m[2063],BR=m[2064],qR=m[2065],AR=m[2066],GR=m[2067],OR=m[2068],NR=m[2069],DR=m[2070],$R=m[2071],QR=m[2072],UR=m[2073],YR=m[2074],HR=m[2075],KR=m[2076],WR=m[2077],JR=m[2078],VR=m[2079],ZR=m[2080],XR=m[2081],bF=m[2082],lF=m[2083],cF=m[2084],iF=m[2085],eF=m[2086],oF=m[2087],aF=m[2088],nF=m[2089],hF=m[2090],fF=m[2091],tF=m[2092],rF=m[2093],sF=m[2094],_F=m[2095],uF=m[2096],vF=m[2097],dF=m[2098],wF=m[2099],yF=m[2100],PF=m[2101],pF=m[2102],gF=m[2103],zF=m[2104],mF=m[2105],xF=m[2106],EF=
m[2107],IF=m[2108],RF=m[2109],FF=m[2110],jF=m[2111],CF=m[2112],kF=m[2113],LF=m[2114],TF=m[2115],SF=m[2116],MF=m[2117],BF=m[2118],qF=m[2119],AF=m[2120],GF=m[2121],OF=m[2122],NF=m[2123],DF=m[2124],$F=m[2125],QF=m[2126],UF=m[2127],YF=m[2128],HF=m[2129],KF=m[2130],WF=m[2131],JF=m[2132],VF=m[2133],ZF=m[2134],XF=m[2135],bj=m[2136],lj=m[2137],cj=m[2138],ij=m[2139],ej=m[2140],oj=m[2141],aj=m[2142],nj=m[2143],hj=m[2144],fj=m[2145],tj=m[2146],rj=m[2147],sj=m[2148],_j=m[2149],uj=m[2150],vj=m[2151],dj=m[2152],wj=m[2153],yj=m[2154],Pj=m[2155],pj=m[2156],gj=m[2157],zj=m[2158],mj=m[2159],xj=m[2160],Ej=m[2161],Ij=m[2162],Rj=m[2163],Fj=m[2164],jj=m[2165],Cj=m[2166],kj=m[2167],Lj=m[2168],Tj=m[2169],Sj=m[2170],Mj=m[2171],Bj=m[2172],qj=m[2173],Aj=m[2174],Gj=m[2175],Oj=m[2176],Nj=m[2177],Dj=m[2178],$j=m[2179],Qj=m[2180],Uj=m[2181],Yj=m[2182],Hj=m[2183],Kj=m[2184],Wj=m[2185],Jj=m[2186],Vj=m[2187],Zj=m[2188],Xj=m[2189],bC=m[2190],lC=m[2191],cC=m[2192],iC=m[2193],eC=m[2194],oC=m[2195],aC=m[2196],nC=m[2197],hC=m[2198],fC=m[2199],tC=m[2200],rC=m[2201],sC=m[2202],_C=m[2203],uC=m[2204],vC=m[2205],dC=m[2206],wC=m[2207],yC=m[2208],PC=m[2209],pC=m[2210],gC=m[2211],zC=m[2212],mC=m[2213],xC=m[2214],EC=m[2215],IC=m[2216],RC=m[2217],FC=m[2218],jC=m[2219],CC=m[2220],kC=m[2221],LC=m[2222],TC=m[2223],SC=m[2224],MC=m[2225],BC=m[2226],qC=m[2227],AC=m[2228],GC=m[2229],OC=m[2230],NC=m[2231],DC=m[2232],$C=m[2233],QC=m[2234],UC=m[2235],YC=m[2236],HC=m[2237],KC=m[2238],WC=m[2239],JC=m[2240],VC=m[2241],ZC=m[2242],XC=m[2243],bk=m[2244],lk=m[2245],ck=m[2246],ik=m[2247],ek=m[2248],ok=m[2249],ak=m[2250],nk=m[2251],hk=m[2252],fk=m[2253],tk=m[2254],rk=m[2255],sk=m[2256],_k=m[2257],uk=m[2258],vk=m[2259],dk=m[2260],wk=m[2261],yk=m[2262],Pk=m[2263],pk=m[2264],gk=m[2265],zk=m[2266],mk=m[2267],xk=m[2268],Ek=m[2269],Ik=m[2270],Rk=m[2271],Fk=m[2272],jk=m[2273],Ck=m[2274],kk=m[2275],Lk=m[2276],Tk=m[2277],Sk=m[2278],Mk=m[2279],Bk=m[2280],qk=m[2281],Ak=m[2282],Gk=m[2283],Ok=m[2284],Nk=m[2285],Dk=m[2286],$k=m[2287],Qk=m[2288],U
k=m[2289],Yk=m[2290],Hk=m[2291],Kk=m[2292],Wk=m[2293],Jk=m[2294],Vk=m[2295],Zk=m[2296],Xk=m[2297],bL=m[2298],lL=m[2299],cL=m[2300],iL=m[2301],eL=m[2302],oL=m[2303],aL=m[2304],nL=m[2305],hL=m[2306],fL=m[2307],tL=m[2308],rL=m[2309],sL=m[2310],_L=m[2311],uL=m[2312],vL=m[2313],dL=m[2314],wL=m[2315],yL=m[2316],PL=m[2317],pL=m[2318],gL=m[2319],zL=m[2320],mL=m[2321],xL=m[2322],EL=m[2323],IL=m[2324],RL=m[2325],FL=m[2326],jL=m[2327],CL=m[2328],kL=m[2329],LL=m[2330],TL=m[2331],SL=m[2332],ML=m[2333],BL=m[2334],qL=m[2335],AL=m[2336],GL=m[2337],OL=m[2338],NL=m[2339],DL=m[2340],$L=m[2341],QL=m[2342],UL=m[2343],YL=m[2344],HL=m[2345],KL=m[2346],WL=m[2347],JL=m[2348],VL=m[2349],ZL=m[2350],XL=m[2351],bT=m[2352],lT=m[2353],cT=m[2354],iT=m[2355],eT=m[2356],oT=m[2357],aT=m[2358],nT=m[2359],hT=m[2360],fT=m[2361],tT=m[2362],rT=m[2363],sT=m[2364],_T=m[2365],uT=m[2366],vT=m[2367],dT=m[2368],wT=m[2369],yT=m[2370],PT=m[2371],pT=m[2372],gT=m[2373],zT=m[2374],mT=m[2375],xT=m[2376],ET=m[2377],IT=m[2378],RT=m[2379],FT=m[2380],jT=m[2381],CT=m[2382],kT=m[2383],LT=m[2384],TT=m[2385],ST=m[2386],MT=m[2387],BT=m[2388],qT=m[2389],AT=m[2390],GT=m[2391],OT=m[2392],NT=m[2393],DT=m[2394],$T=m[2395],QT=m[2396],UT=m[2397],YT=m[2398],HT=m[2399],KT=m[2400],WT=m[2401],JT=m[2402],VT=m[2403],ZT=m[2404],XT=m[2405],bS=m[2406],lS=m[2407],cS=m[2408],iS=m[2409],eS=m[2410],oS=m[2411],aS=m[2412],nS=m[2413],hS=m[2414],fS=m[2415],tS=m[2416],rS=m[2417],sS=m[2418],_S=m[2419],uS=m[2420],vS=m[2421],dS=m[2422],wS=m[2423],yS=m[2424],PS=m[2425],pS=m[2426],gS=m[2427],zS=m[2428],mS=m[2429],xS=m[2430],ES=m[2431],IS=m[2432],RS=m[2433],FS=m[2434],jS=m[2435],CS=m[2436],kS=m[2437],LS=m[2438],TS=m[2439],SS=m[2440],MS=m[2441],BS=m[2442],qS=m[2443],AS=m[2444],GS=m[2445],OS=m[2446],NS=m[2447],DS=m[2448],$S=m[2449],QS=m[2450],US=m[2451],YS=m[2452],HS=m[2453],KS=m[2454],WS=m[2455],JS=m[2456],VS=m[2457],ZS=m[2458],XS=m[2459],bM=m[2460],lM=m[2461],cM=m[2462],iM=m[2463],eM=m[2464],oM=m[2465],aM=m[2466],nM=m[2467],hM=m[2468],fM=m[2469],tM=m[2470]
,rM=m[2471],sM=m[2472],_M=m[2473],uM=m[2474],vM=m[2475],dM=m[2476],wM=m[2477],yM=m[2478],PM=m[2479],pM=m[2480],gM=m[2481],zM=m[2482],mM=m[2483],xM=m[2484],EM=m[2485],IM=m[2486],RM=m[2487],FM=m[2488],jM=m[2489],CM=m[2490],kM=m[2491],LM=m[2492],TM=m[2493],SM=m[2494],MM=m[2495],BM=m[2496],qM=m[2497],AM=m[2498],GM=m[2499],OM=m[2500],NM=m[2501],DM=m[2502],$M=m[2503],QM=m[2504],UM=m[2505],YM=m[2506],HM=m[2508],KM=m[2509],WM=m[2510],JM=m[2511],VM=m[2512],ZM=m[2513],XM=m[2514],bB=m[2515],lB=m[2516],cB=m[2517],iB=m[2518],eB=m[2519],oB=m[2520],aB=m[2521],nB=m[2522],hB=m[2523],fB=m[2524],tB=m[2525],rB=m[2526],sB=m[2527],_B=m[2528],uB=m[2529],vB=m[2530],dB=m[2531],wB=m[2532],yB=m[2533],PB=m[2534],pB=m[2535],gB=m[2536],zB=m[2537],mB=m[2538],xB=m[2539],EB=m[2540],IB=m[2541],RB=m[2542],FB=m[2543],jB=m[2544],CB=m[2545],kB=m[2546],LB=m[2547],TB=m[2548],SB=m[2549],MB=m[2550],BB=m[2551],qB=m[2552],AB=m[2553],GB=m[2554],OB=m[2555],NB=m[2556],DB=m[2557],$B=m[2558],QB=m[2559],UB=m[2560],YB=m[2561],HB=m[2562],KB=m[2563],WB=m[2564],JB=m[2565],VB=m[2566],ZB=m[2567],XB=m[2568],bq=m[2569],lq=m[2570],cq=m[2571],iq=m[2572],eq=m[2573],oq=m[2574],aq=m[2575],nq=m[2576],hq=m[2577],fq=m[2578],tq=m[2579],rq=m[2580],sq=m[2581],_q=m[2582],uq=m[2583],vq=m[2584],dq=m[2585],wq=m[2586],yq=m[2587],Pq=m[2588],pq=m[2589],gq=m[2590],zq=m[2591],mq=m[2592],xq=m[2593],Eq=m[2594],Iq=m[2595],Rq=m[2596],Fq=m[2597],jq=m[2598],Cq=m[2599],kq=m[2600],Lq=m[2601],Tq=m[2602],Sq=m[2603],Mq=m[2604],Bq=m[2605],qq=m[2606],Aq=m[2607],Gq=m[2608],Oq=m[2609],Nq=m[2610],Dq=m[2611],$q=m[2612],Qq=m[2613],Uq=m[2614],Yq=m[2615],Hq=m[2616],Kq=m[2617],Wq=m[2618],Jq=m[2619],Vq=m[2620],Zq=m[2621],Xq=m[2622],bA=m[2623],lA=m[2624],cA=m[2625],iA=m[2626],eA=m[2627],oA=m[2628],aA=m[2629],nA=m[2630],hA=m[2631],fA=m[2632],tA=m[2633],rA=m[2634],sA=m[2635],_A=m[2636],uA=m[2637],vA=m[2638],dA=m[2639],wA=m[2640],yA=m[2641],PA=m[2642],pA=m[2643],gA=m[2644],zA=m[2645],mA=m[2646],xA=m[2647],EA=m[2648],IA=m[2649],RA=m[2650],FA=m[2651],jA=m[2652],CA=m[265
3],kA=m[2654],LA=m[2655],TA=m[2656],SA=m[2657],MA=m[2658],BA=m[2659],qA=m[2660],AA=m[2661],GA=m[2662],OA=m[2663],NA=m[2664],DA=m[2665],$A=m[2666],QA=m[2667],UA=m[2668],YA=m[2669],HA=m[2670],KA=m[2671],WA=m[2672],JA=m[2673],VA=m[2674],ZA=m[2675],XA=m[2676],bG=m[2677],lG=m[2678],cG=m[2679],iG=m[2680],eG=m[2681],oG=m[2682],aG=m[2683],nG=m[2684],hG=m[2685],fG=m[2686],tG=m[2687],rG=m[2688],sG=m[2689],_G=m[2690],uG=m[2691],vG=m[2692],dG=m[2693],wG=m[2694],yG=m[2695],PG=m[2696],pG=m[2697],gG=m[2698],zG=m[2699],mG=m[2700],xG=m[2701],EG=m[2702],IG=m[2703],RG=m[2704],FG=m[2705],jG=m[2706],CG=m[2707],kG=m[2708],LG=m[2709],TG=m[2710],SG=m[2711],MG=m[2712],BG=m[2713],qG=m[2714],AG=m[2715],GG=m[2716],OG=m[2717],NG=m[2718],DG=m[2719],$G=m[2720],QG=m[2721],UG=m[2722],YG=m[2723],HG=m[2724],KG=m[2725],WG=m[2726],JG=m[2727],VG=m[2728],ZG=m[2729],XG=m[2730],bO=m[2731],lO=m[2732],cO=m[2733],iO=m[2734],eO=m[2735],oO=m[2736],aO=m[2737],nO=m[2738],hO=m[2739],fO=m[2740],tO=m[2741],rO=m[2742],sO=m[2743],_O=m[2744],uO=m[2745],vO=m[2746],dO=m[2747],wO=m[2748],yO=m[2749],PO=m[2750],pO=m[2751],gO=m[2752],zO=m[2753],mO=m[2754],xO=m[2755],EO=m[2756],IO=m[2757],RO=m[2758],FO=m[2759],jO=m[2760],CO=m[2761],kO=m[2762],LO=m[2763],TO=m[2764],SO=m[2765],MO=m[2766],BO=m[2767],qO=m[2768],AO=m[2769],GO=m[2770],OO=m[2771],NO=m[2772],DO=m[2773],$O=m[2774],QO=m[2775],UO=m[2776],YO=m[2777],HO=m[2778],KO=m[2779],WO=m[2780],JO=m[2781],VO=m[2782],ZO=m[2783],XO=m[2784],bN=m[2785],lN=m[2786],cN=m[2787],iN=m[2788],eN=m[2789],oN=m[2790],aN=m[2791],nN=m[2792],hN=m[2793],fN=m[2794],tN=m[2795],rN=m[2796],sN=m[2797],_N=m[2798],uN=m[2799],vN=m[2800],dN=m[2801],wN=m[2802],yN=m[2803],PN=m[2804],pN=m[2805],gN=m[2806],zN=m[2807],mN=m[2808],xN=m[2809],EN=m[2810],IN=m[2811],RN=m[2812],FN=m[2813],jN=m[2814],CN=m[2815],kN=m[2816],LN=m[2817],TN=m[2818],SN=m[2819],MN=m[2820],BN=m[2821],qN=m[2822],AN=m[2823],GN=m[2824],ON=m[2825],NN=m[2826],DN=m[2827],$N=m[2828],QN=m[2829],UN=m[2830],YN=m[2831],HN=m[2832],KN=m[2833],WN=m[2834],JN=m[2
835],VN=m[2836],ZN=m[2837],XN=m[2838],bD=m[2839],lD=m[2840],cD=m[2841],iD=m[2842],eD=m[2843],oD=m[2844],aD=m[2845],nD=m[2846],hD=m[2847],fD=m[2848],tD=m[2849],rD=m[2850],sD=m[2851],_D=m[2852],uD=m[2853],vD=m[2854],dD=m[2855],wD=m[2856],yD=m[2857],PD=m[2858],pD=m[2859],gD=m[2860],zD=m[2861],mD=m[2862],xD=m[2863],ED=m[2864],ID=m[2865],RD=m[2866],FD=m[2867],jD=m[2868],CD=m[2869],kD=m[2870],LD=m[2871],TD=m[2872],SD=m[2873],MD=m[2874],BD=m[2875],qD=m[2876],AD=m[2877],GD=m[2878],OD=m[2879],ND=m[2880],DD=m[2881],$D=m[2882],QD=m[2883],UD=m[2884],YD=m[2885],HD=m[2886],KD=m[2887],WD=m[2888],JD=m[2889],VD=m[2890],ZD=m[2891],XD=m[2892],b$=m[2893],l$=m[2894],c$=m[2895],i$=m[2896],e$=m[2897],o$=m[2898],a$=m[2899],n$=m[2900],h$=m[2901],f$=m[2902],t$=m[2903],r$=m[2904],s$=m[2905],_$=m[2906],u$=m[2907],v$=m[2908],d$=m[2909],w$=m[2910],y$=m[2911],P$=m[2912],p$=m[2913],g$=m[2914],z$=m[2915],m$=m[2916],x$=m[2917],E$=m[2918],I$=m[2919],R$=m[2920],F$=m[2921],j$=m[2922],C$=m[2923],k$=m[2924],L$=m[2925],T$=m[2926],S$=m[2927],M$=m[2928],B$=m[2929],q$=m[2930],A$=m[2931],G$=m[2932],O$=m[2933],N$=m[2934],D$=m[2935],$$=m[2936],Q$=m[2937],U$=m[2938],Y$=m[2939],H$=m[2940],K$=m[2941],W$=m[2942],J$=m[2943],V$=m[2944],Z$=m[2945],X$=m[2946],bQ=m[2947],lQ=m[2948],cQ=m[2949],iQ=m[2950],eQ=m[2951],oQ=m[2952],aQ=m[2953],nQ=m[2954],hQ=m[2955],fQ=m[2956],tQ=m[2957],rQ=m[2958],sQ=m[2959],_Q=m[2960],uQ=m[2961],vQ=m[2962],dQ=m[2963],wQ=m[2964],yQ=m[2965],PQ=m[2966],pQ=m[2967],gQ=m[2968],zQ=m[2969],mQ=m[2970],xQ=m[2971],EQ=m[2972],IQ=m[2973],RQ=m[2974],FQ=m[2975],jQ=m[2976],CQ=m[2977],kQ=m[2978],LQ=m[2979],TQ=m[2980],SQ=m[2981],MQ=m[2982],BQ=m[2983],qQ=m[2984],AQ=m[2985],GQ=m[2986],OQ=m[2987],NQ=m[2988],DQ=m[2989],$Q=m[2990],QQ=m[2991],UQ=m[2992],YQ=m[2993],HQ=m[2994],KQ=m[2996],WQ=m[2997],JQ=m[2998],VQ=m[2999],ZQ=m[3e3],XQ=m[3001],bU=m[3002],lU=m[3003],cU=m[3004],iU=m[3005],eU=m[3006],oU=m[3007],aU=m[3008],nU=m[3009],hU=m[3010],fU=m[3011],tU=m[3012],rU=m[3013],sU=m[3014],_U=m[3015],uU=m[3016],vU=m[3017],dU=m[
3018],wU=m[3019],yU=m[3020],PU=m[3021],pU=m[3022],gU=m[3023],zU=m[3024],mU=m[3025],xU=m[3026],EU=m[3027],IU=m[3028],RU=m[3029],FU=m[3030],jU=m[3031],CU=m[3032],kU=m[3033],LU=m[3034],TU=m[3035],SU=m[3036],MU=m[3037],BU=m[3038],qU=m[3039],AU=m[3040],GU=m[3041],OU=m[3042],NU=m[3043],DU=m[3044],$U=m[3045],QU=m[3046],UU=m[3047],YU=m[3048],HU=m[3049],KU=m[3050],WU=m[3051],JU=m[3052],VU=m[3053],ZU=m[3054],XU=m[3055],bY=m[3056],lY=m[3057],cY=m[3058],iY=m[3059],eY=m[3060],oY=m[3061],aY=m[3062],nY=m[3063],hY=m[3064],fY=m[3065],tY=m[3066],rY=m[3067],sY=m[3068],_Y=m[3069],uY=m[3070],vY=m[3071],dY=m[3072],wY=m[3073],yY=m[3074],PY=m[3075],pY=m[3076],gY=m[3077],zY=m[3078],mY=m[3079],xY=m[3080],EY=m[3081],IY=m[3082],RY=m[3083],FY=m[3084],jY=m[3085],CY=m[3086],kY=m[3087],LY=m[3088],TY=m[3089],SY=m[3090],MY=m[3091],BY=m[3092],qY=m[3093],AY=m[3094],GY=m[3095],OY=m[3096],NY=m[3097],DY=m[3098],$Y=m[3099],QY=m[3100],UY=m[3101],YY=m[3102],HY=m[3103],KY=m[3104],WY=m[3105],JY=m[3106],VY=m[3107],ZY=m[3108],XY=m[3109],bH=m[3110],lH=m[3111],cH=m[3112],iH=m[3113],eH=m[3114],oH=m[3115],aH=m[3116],nH=m[3117],hH=m[3118],fH=m[3119],tH=m[3120],rH=m[3121],sH=m[3122],_H=m[3123],uH=m[3124],vH=m[3125],dH=m[3126],wH=m[3127],yH=m[3128],PH=m[3129],pH=m[3130],gH=m[3131],zH=m[3132],mH=m[3133],xH=m[3134],EH=m[3135],IH=m[3136],RH=m[3137],FH=m[3138],jH=m[3139],CH=m[3140],kH=m[3141],LH=m[3142],TH=m[3143],SH=m[3144],MH=m[3145],BH=m[3146],qH=m[3147],AH=m[3148],GH=m[3149],OH=m[3150],NH=m[3151],DH=m[3152],$H=m[3153],QH=m[3154],UH=m[3155],YH=m[3156],HH=m[3157],KH=m[3158],WH=m[3159],JH=m[3160],VH=m[3161],ZH=m[3162],XH=m[3163],bK=m[3164],lK=m[3165],cK=m[3166],iK=m[3167],eK=m[3168],oK=m[3169],aK=m[3170],nK=m[3171],hK=m[3172],fK=m[3173],tK=m[3174],rK=m[3175],sK=m[3176],_K=m[3177],uK=m[3178],vK=m[3179],dK=m[3180],wK=m[3181],yK=m[3182],PK=m[3183],pK=m[3184],gK=m[3185],zK=m[3186],mK=m[3187],xK=m[3188],EK=m[3189],IK=m[3190],RK=m[3191],FK=m[3192],jK=m[3193],CK=m[3194],kK=m[3195],LK=m[3196],TK=m[3197],SK=m[3198],MK=m[3199],BK=
m[3200],qK=m[3201],AK=m[3202],GK=m[3203],OK=m[3204],NK=m[3205],DK=m[3206],$K=m[3207],QK=m[3208],UK=m[3209],YK=m[3210],HK=m[3211],KK=m[3212],WK=m[3213],JK=m[3214],VK=m[3215],ZK=m[3216],XK=m[3217],bW=m[3218],lW=m[3219],cW=m[3220],iW=m[3221],eW=m[3222],oW=m[3223],aW=m[3224],nW=m[3225],hW=m[3226],fW=m[3227],tW=m[3228],rW=m[3229],sW=m[3230],_W=m[3231],uW=m[3232],vW=m[3233],dW=m[3234],wW=m[3235],yW=m[3236],PW=m[3237],pW=m[3238],gW=m[3239],zW=m[3240],mW=m[3241],xW=m[3242],EW=m[3243],IW=m[3244],RW=m[3245],FW=m[3246],jW=m[3247],CW=m[3248],kW=m[3249],LW=m[3250],TW=m[3251],SW=m[3252],MW=m[3253],BW=m[3254],qW=m[3255],AW=m[3256],GW=m[3257],OW=m[3258],NW=m[3259],DW=m[3260],$W=m[3261],QW=m[3262],UW=m[3263],YW=m[3264],HW=m[3265],KW=m[3266],WW=m[3267],JW=m[3268],VW=m[3269],ZW=m[3270],XW=m[3271],bJ=m[3272],lJ=m[3273],cJ=m[3274],iJ=m[3275],eJ=m[3276],oJ=m[3277],aJ=m[3278],nJ=m[3279],hJ=m[3280],fJ=m[3281],tJ=m[3282],rJ=m[3283],sJ=m[3284],_J=m[3285],uJ=m[3286],vJ=m[3287],dJ=m[3288],wJ=m[3289],yJ=m[3290],PJ=m[3291],pJ=m[3292],gJ=m[3293],zJ=m[3294],mJ=m[3295],xJ=m[3296],EJ=m[3297],IJ=m[3298],RJ=m[3299],FJ=m[3300],jJ=m[3301],CJ=m[3302],kJ=m[3303],LJ=m[3304],TJ=m[3305],SJ=m[3306],MJ=m[3307],BJ=m[3308],qJ=m[3309],AJ=m[3310],GJ=m[3311],OJ=m[3312],NJ=m[3313],DJ=m[3314],$J=m[3315],QJ=m[3316],UJ=m[3317],YJ=m[3318],HJ=m[3319],KJ=m[3320],WJ=m[3321],JJ=m[3322],VJ=m[3323],ZJ=m[3324],XJ=m[3325],bV=m[3326],lV=m[3327],cV=m[3328],iV=m[3329],eV=m[3330],oV=m[3331],aV=m[3332],nV=m[3333],hV=m[3334],fV=m[3335],tV=m[3336],rV=m[3337],sV=m[3338],_V=m[3339],uV=m[3340],vV=m[3341],dV=m[3342],wV=m[3343],yV=m[3344],PV=m[3345],pV=m[3346],gV=m[3347],zV=m[3348],mV=m[3349],xV=m[3350],EV=m[3351],IV=m[3352],RV=m[3353],FV=m[3354],jV=m[3355],CV=m[3356],kV=m[3357],LV=m[3358],TV=m[3359],SV=m[3360],MV=m[3361],BV=m[3362],qV=m[3363],AV=m[3364],GV=m[3365],OV=m[3366],NV=m[3367],DV=m[3368],$V=m[3369],QV=m[3370],UV=m[3371],YV=m[3372],HV=m[3373],KV=m[3374],WV=m[3375],JV=m[3376],VV=m[3377],ZV=m[3378],XV=m[3379],bZ=m[3380],lZ=m[3381],c
Z=m[3382],iZ=m[3383],eZ=m[3384],oZ=m[3385],aZ=m[3386],nZ=m[3387],hZ=m[3388],fZ=m[3389],tZ=m[3390],rZ=m[3391],sZ=m[3392],_Z=m[3393],uZ=m[3394],vZ=m[3395],dZ=m[3396],wZ=m[3397],yZ=m[3398],PZ=m[3399],pZ=m[3400],gZ=m[3401],zZ=m[3402],mZ=m[3403],xZ=m[3404],EZ=m[3405],IZ=m[3406],RZ=m[3407],FZ=m[3408],jZ=m[3409],CZ=m[3410],kZ=m[3411],LZ=m[3412],TZ=m[3413],SZ=m[3414],MZ=m[3415],BZ=m[3416],qZ=m[3417],AZ=m[3418],GZ=m[3419],OZ=m[3420],NZ=m[3421],DZ=m[3422],$Z=m[3423],QZ=m[3424],UZ=m[3425],YZ=m[3426],HZ=m[3427],KZ=m[3428],WZ=m[3429],JZ=m[3430],VZ=m[3431],ZZ=m[3432],XZ=m[3433],bX=m[3434],lX=m[3435],cX=m[3436],iX=m[3437],eX=m[3438],oX=m[3439],aX=m[3440],nX=m[3441],hX=m[3442],fX=m[3443],tX=m[3444],rX=m[3445],sX=m[3446],_X=m[3447],uX=m[3448],vX=m[3449],dX=m[3450],wX=m[3451],yX=m[3452],PX=m[3453],pX=m[3454],gX=m[3455],zX=m[3456],mX=m[3457],xX=m[3458],EX=m[3459],IX=m[3460],RX=m[3461],FX=m[3462],jX=m[3463],CX=m[3464],kX=m[3465],LX=m[3466],TX=m[3467],SX=m[3468],MX=m[3469],BX=m[3470],qX=m[3471],AX=m[3473],GX=m[3474],OX=m[3475],NX=m[3476],DX=m[3477],$X=m[3478],QX=m[3479],UX=m[3480],YX=m[3481],HX=m[3482],KX=m[3483],WX=m[3484],JX=m[3485],VX=m[3486],ZX=m[3487],XX=m[3488],b22=m[3489],l22=m[3490],c22=m[3491],i22=m[3492],e22=m[3493],o22=m[3494],a22=m[3495],n22=m[3496],h22=m[3497],f22=m[3498],t22=m[3499],r22=m[3500],s22=m[3501],_22=m[3502],u22=m[3503],v22=m[3504],d22=m[3505],w22=m[3506],y22=m[3507],P22=m[3508],p22=m[3509],g22=m[3510],z22=m[3511],m22=m[3512],x22=m[3513],E22=m[3514],I22=m[3515],R22=m[3516],F22=m[3517],j22=m[3518],C22=m[3519],k22=m[3520],L22=m[3521],T22=m[3522],S22=m[3523],M22=m[3524],B22=m[3525],q22=m[3526],A22=m[3527],G22=m[3528],O22=m[3529],N22=m[3530],D22=m[3531],$22=m[3532],Q22=m[3533],U22=m[3534],Y22=m[3535],H22=m[3536],K22=m[3537],W22=m[3538],J22=m[3539],V22=m[3540],Z22=m[3541],X22=m[3542],b12=m[3543],l12=m[3544],c12=m[3545],i12=m[3546],e12=m[3547],o12=m[3548],a12=m[3549],n12=m[3550],h12=m[3551],f12=m[3552],t12=m[3553],r12=m[3554],s12=m[3555],_12=m[3556],u12=m[3557],v12=m[3
558],d12=m[3559],w12=m[3560],y12=m[3561],P12=m[3562],p12=m[3563],g12=m[3564],z12=m[3565],m12=m[3566],x12=m[3567],E12=m[3568],I12=m[3569],R12=m[3570],F12=m[3571],j12=m[3572],C12=m[3573],k12=m[3574],L12=m[3575],T12=m[3576],S12=m[3577],M12=m[3578],B12=m[3579],q12=m[3580],A12=m[3581],G12=m[3582],O12=m[3583],N12=m[3584],D12=m[3585],$12=m[3586],Q12=m[3587],U12=m[3588],Y12=m[3589],H12=m[3590],K12=m[3591],W12=m[3592],J12=m[3593],V12=m[3594],Z12=m[3595],X12=m[3596],b52=m[3597],l52=m[3598],c52=m[3599],i52=m[3600],e52=m[3601],o52=m[3602],a52=m[3603],n52=m[3604],h52=m[3605],f52=m[3606],t52=m[3607],r52=m[3608],s52=m[3609],_52=m[3610],u52=m[3611],v52=m[3612],d52=m[3613],w52=m[3614],y52=m[3615],P52=m[3616],p52=m[3617],g52=m[3618],z52=m[3619],m52=m[3620],x52=m[3621],E52=m[3622],I52=m[3623],R52=m[3624],F52=m[3625],j52=m[3626],C52=m[3627],k52=m[3628],L52=m[3629],T52=m[3630],S52=m[3631],M52=m[3632],B52=m[3633],q52=m[3634],A52=m[3635],G52=m[3636],O52=m[3637],N52=m[3638],D52=m[3639],$52=m[3640],Q52=m[3641],U52=m[3642],Y52=m[3643],H52=m[3644],K52=m[3645],W52=m[3646],J52=m[3647],V52=m[3648],Z52=m[3649],X52=m[3650],b72=m[3651],l72=m[3652],c72=m[3653],i72=m[3654],e72=m[3655],o72=m[3656],a72=m[3657],n72=m[3658],h72=m[3659],f72=m[3660],t72=m[3661],r72=m[3662],s72=m[3663],_72=m[3664],u72=m[3665],v72=m[3666],d72=m[3667],w72=m[3668],y72=m[3669],P72=m[3670],p72=m[3671],g72=m[3672],z72=m[3673],m72=m[3674],x72=m[3675],E72=m[3676],I72=m[3677],R72=m[3678],F72=m[3679],j72=m[3680],C72=m[3681],k72=m[3682],L72=m[3683],T72=m[3684],S72=m[3685],M72=m[3686],B72=m[3687],q72=m[3688],A72=m[3689],G72=m[3690],O72=m[3691],N72=m[3692],D72=m[3693],$72=m[3694],Q72=m[3695],U72=m[3696],Y72=m[3697],H72=m[3698],K72=m[3699],W72=m[3700],J72=m[3701],V72=m[3703],Z72=m[3704],X72=m[3705],b32=m[3706],l32=m[3707],c32=m[3708],i32=m[3709],e32=m[3710],o32=m[3711],a32=m[3712],n32=m[3713],h32=m[3714],f32=m[3715],t32=m[3716],r32=m[3717],s32=m[3718],_32=m[3719],u32=m[3720],v32=m[3721],d32=m[3722],w32=m[3723],y32=m[3724],P32=m[3725],p32
=m[3726],g32=m[3727],z32=m[3728],m32=m[3729],x32=m[3730],E32=m[3731],I32=m[3732],R32=m[3733],F32=m[3734],j32=m[3735],C32=m[3736],k32=m[3737],L32=m[3738],T32=m[3739],S32=m[3740],M32=m[3741],B32=m[3742],q32=m[3743],A32=m[3744],G32=m[3745],O32=m[3746],N32=m[3747],D32=m[3748],$32=m[3749],Q32=m[3750],U32=m[3751],Y32=m[3752],H32=m[3753],K32=m[3754],W32=m[3755],J32=m[3756],V32=m[3757],Z32=m[3758],X32=m[3759],b42=m[3760],l42=m[3761],c42=m[3762],i42=m[3763],e42=m[3764],o42=m[3765],a42=m[3766],n42=m[3767],h42=m[3768],f42=m[3769],t42=m[3770],r42=m[3771],s42=m[3772],_42=m[3773],u42=m[3774],v42=m[3775],d42=m[3776],w42=m[3777],y42=m[3778],P42=m[3779],p42=m[3780],g42=m[3781],z42=m[3782],m42=m[3783],x42=m[3784],E42=m[3785],I42=m[3786],R42=m[3787],F42=m[3788],j42=m[3789],C42=m[3790],k42=m[3791],L42=m[3792],T42=m[3793],S42=m[3794],M42=m[3795],B42=m[3796],q42=m[3797],A42=m[3798],G42=m[3799],O42=m[3800],N42=m[3801],D42=m[3802],$42=m[3803],Q42=m[3804],U42=m[3805],Y42=m[3806],H42=m[3807],K42=m[3808],W42=m[3809],J42=m[3810],V42=m[3811],Z42=m[3812],X42=m[3813],b62=m[3814],l62=m[3815],c62=m[3816],i62=m[3817],e62=m[3818],o62=m[3819],a62=m[3820],n62=m[3821],h62=m[3822],f62=m[3823],t62=m[3824],r62=m[3825],s62=m[3826],_62=m[3827],u62=m[3828],v62=m[3829],d62=m[3830],w62=m[3831],y62=m[3832],P62=m[3833],p62=m[3834],g62=m[3835],z62=m[3836],m62=m[3837],x62=m[3838],E62=m[3839],I62=m[3840],R62=m[3841],F62=m[3842],j62=m[3843],C62=m[3844],k62=m[3845],L62=m[3846],T62=m[3847],S62=m[3848],M62=m[3849],B62=m[3850],q62=m[3851],A62=m[3852],G62=m[3853],O62=m[3854],N62=m[3855],D62=m[3856],$62=m[3857],Q62=m[3858],U62=m[3859],Y62=m[3860],H62=m[3861],K62=m[3862],W62=m[3863],J62=m[3864],V62=m[3865],Z62=m[3866],X62=m[3867],b02=m[3868],l02=m[3869],c02=m[3870],i02=m[3871],e02=m[3872],o02=m[3873],a02=m[3874],n02=m[3875],h02=m[3876],f02=m[3877],t02=m[3878],r02=m[3879],s02=m[3880],_02=m[3881],u02=m[3882],v02=m[3883],d02=m[3884],w02=m[3885],y02=m[3886],P02=m[3887],p02=m[3888],g02=m[3889],z02=m[3890],m02=m[3891],x02=m[3892]
,E02=m[3893],I02=m[3894],R02=m[3895],F02=m[3896],j02=m[3897],C02=m[3898],k02=m[3899],L02=m[3900],T02=m[3901],S02=m[3902],M02=m[3903],B02=m[3904],q02=m[3905],A02=m[3906],G02=m[3907],O02=m[3908],N02=m[3909],D02=m[3910],$02=m[3911],Q02=m[3912],U02=m[3913],Y02=m[3914],H02=m[3915],K02=m[3916],W02=m[3917],J02=m[3918],V02=m[3919],Z02=m[3920],X02=m[3921],b82=m[3922],l82=m[3923],c82=m[3924],i82=m[3925],e82=m[3926],o82=m[3927],a82=m[3928],n82=m[3929],h82=m[3931],f82=m[3932],t82=m[3933],r82=m[3934],s82=m[3935],_82=m[3936],u82=m[3937],v82=m[3938],d82=m[3939],w82=m[3940],y82=m[3941],P82=m[3942],p82=m[3943],g82=m[3944],z82=m[3945],m82=m[3946],x82=m[3947],E82=m[3948],I82=m[3949],R82=m[3950],F82=m[3951],j82=m[3952],C82=m[3953],k82=m[3954],L82=m[3955],T82=m[3956],S82=m[3957],M82=m[3958],B82=m[3959],q82=m[3960],A82=m[3961],G82=m[3962],O82=m[3963],N82=m[3964],D82=m[3965],$82=m[3966],Q82=m[3967],U82=m[3968],Y82=m[3969],H82=m[3970],K82=m[3971],W82=m[3972],J82=m[3973],V82=m[3974],Z82=m[3975],X82=m[3976],b92=m[3977],l92=m[3978],c92=m[3979],i92=m[3980],e92=m[3981],o92=m[3982],a92=m[3983],n92=m[3984],h92=m[3985],f92=m[3986],t92=m[3987],r92=m[3988],s92=m[3989],_92=m[3990],u92=m[3991],v92=m[3992],d92=m[3993],w92=m[3994],y92=m[3995],P92=m[3996],p92=m[3997],g92=m[3998],z92=m[3999],m92=m[4e3],x92=m[4001],E92=m[4002],I92=m[4003],R92=m[4004],F92=m[4005],j92=m[4006],C92=m[4007],k92=m[4008],L92=m[4009],T92=m[4010],S92=m[4011],M92=m[4012],B92=m[4013],q92=m[4014],A92=m[4015],G92=m[4016],O92=m[4017],N92=m[4018],D92=m[4019],$92=m[4020],Q92=m[4021],U92=m[4022],Y92=m[4023],H92=m[4024],K92=m[4025],W92=m[4026],J92=m[4027],V92=m[4028],Z92=m[4029],X92=m[4030],bb2=m[4031],lb2=m[4032],cb2=m[4033],ib2=m[4034],eb2=m[4035],ob2=m[4036],ab2=m[4037],nb2=m[4038],hb2=m[4039],fb2=m[4040],tb2=m[4041],rb2=m[4043],sb2=m[4044],_b2=m[4045],ub2=m[4046],vb2=m[4047],db2=m[4048],wb2=m[4049],yb2=m[4050],Pb2=m[4051],pb2=m[4052],gb2=m[4053],zb2=m[4054],mb2=m[4055],xb2=m[4056],Eb2=m[4057],Ib2=m[4058],Rb2=m[4059],Fb2=m[4060],jb2=m[40
61],Cb2=m[4062],kb2=m[4063],Lb2=m[4064],Tb2=m[4065],Sb2=m[4066],Mb2=m[4067],Bb2=m[4068],qb2=m[4070],Ab2=m[4071],Gb2=m[4072],Ob2=m[4073],Nb2=m[4074],Db2=m[4075],$b2=m[4076],Qb2=m[4077],Ub2=m[4078],Yb2=m[4079],Hb2=m[4080],Kb2=m[4081],Wb2=m[4082],Jb2=m[4083],Vb2=m[4084],Zb2=m[4085],Xb2=m[4086],bl2=m[4087],ll2=m[4088],cl2=m[4089],il2=m[4090],el2=m[4091],ol2=m[4092],al2=m[4093],nl2=m[4095],hl2=m[4096],fl2=m[4097],tl2=m[4098],rl2=m[4099],sl2=m[4100],_l2=m[4101],ul2=m[4102],vl2=m[4103],dl2=m[4104],wl2=m[4105],yl2=m[4106],Pl2=m[4107],pl2=m[4108],gl2=m[4109],zl2=m[4110],ml2=m[4111],xl2=m[4112],El2=m[4113],Il2=m[4114],Rl2=m[4115],Fl2=m[4116],jl2=m[4117],Cl2=m[4118],kl2=m[4119],Ll2=m[4120],Tl2=m[4121],Sl2=m[4122],Ml2=m[4123],Bl2=m[4124],ql2=m[4125],Al2=m[4126],Gl2=m[4127],Ol2=m[4128],Nl2=m[4129],Dl2=m[4130],$l2=m[4131],Ql2=m[4132],Ul2=m[4133],Yl2=m[4134],Hl2=m[4135],Kl2=m[4136],Wl2=m[4137],Jl2=m[4138],Vl2=m[4139],Zl2=m[4140],Xl2=m[4141],bc2=m[4142],lc2=m[4143],cc2=m[4144],ic2=m[4145],ec2=m[4146],oc2=m[4147],ac2=m[4148],nc2=m[4149],hc2=m[4150],fc2=m[4151],tc2=m[4152],rc2=m[4153],sc2=m[4154],_c2=m[4155],uc2=m[4156],vc2=m[4157],dc2=m[4158],wc2=m[4159],yc2=m[4160],Pc2=m[4161],pc2=m[4162],gc2=m[4163],zc2=m[4164],mc2=m[4165],xc2=m[4166],Ec2=m[4167],Ic2=m[4168],Rc2=m[4169],Fc2=m[4170],jc2=m[4171],Cc2=m[4172],kc2=m[4173],Lc2=m[4174],Tc2=m[4175],Sc2=m[4176],Mc2=m[4177],Bc2=m[4178],qc2=m[4179],Ac2=m[4180],Gc2=m[4181],Oc2=m[4182],Nc2=m[4183],Dc2=m[4184],$c2=m[4185],Qc2=m[4186],Uc2=m[4187],Yc2=m[4188],Hc2=m[4189],Kc2=m[4190],Wc2=m[4191],Jc2=m[4192],Vc2=m[4193],Zc2=m[4194],Xc2=m[4195],bi2=m[4196],li2=m[4197],ci2=m[4198],ii2=m[4199],ei2=m[4200],oi2=m[4201],ai2=m[4202],ni2=m[4203],hi2=m[4204],fi2=m[4205],ti2=m[4206],ri2=m[4207],si2=m[4208],_i2=m[4209],ui2=m[4210],vi2=m[4211],di2=m[4212],wi2=m[4213],yi2=m[4214],Pi2=m[4215],pi2=m[4216],gi2=m[4217],zi2=m[4218],mi2=m[4219],xi2=m[4220],Ei2=m[4221],Ii2=m[4222],Ri2=m[4223],Fi2=m[4224],ji2=m[4225],Ci2=m[4226],ki2=m[4227],Li2=m[4228],Ti2=m[4229],Si2=
m[4230],Mi2=m[4231],Bi2=m[4232],qi2=m[4233],Ai2=m[4234],Gi2=m[4235],Oi2=m[4236],Ni2=m[4237],Di2=m[4238],$i2=m[4239],Qi2=m[4240],Ui2=m[4241],Yi2=m[4242],Hi2=m[4243],Ki2=m[4244],Wi2=m[4245],Ji2=m[4246],Vi2=m[4247],Zi2=m[4248],Xi2=m[4249],be2=m[4250],le2=m[4251],ce2=m[4252],ie2=m[4253],ee2=m[4254],oe2=m[4255],ae2=m[4256],ne2=m[4257],he2=m[4258],fe2=m[4259],te2=m[4260],re2=m[4261],se2=m[4262],_e2=m[4263],ue2=m[4264],ve2=m[4265],de2=m[4266],we2=m[4267],ye2=m[4268],Pe2=m[4269],pe2=m[4270],ge2=m[4271],ze2=m[4272],me2=m[4273],xe2=m[4274],Ee2=m[4275],Ie2=m[4276],Re2=m[4277],Fe2=m[4278],je2=m[4279],Ce2=m[4280],ke2=m[4281],Le2=m[4282],Te2=m[4283],Se2=m[4284],Me2=m[4285],Be2=m[4286],qe2=m[4287],Ae2=m[4288],Ge2=m[4289],Oe2=m[4290],Ne2=m[4291],De2=m[4292],$e2=m[4293],Qe2=m[4294],Ue2=m[4295],Ye2=m[4296],He2=m[4297],Ke2=m[4298],We2=m[4299],Je2=m[4300],Ve2=m[4301],Ze2=m[4302],Xe2=m[4303],bo2=m[4304],lo2=m[4305],co2=m[4306],io2=m[4307],eo2=m[4308],oo2=m[4309],ao2=m[4310],no2=m[4311],ho2=m[4312],fo2=m[4313],to2=m[4314],ro2=m[4315],so2=m[4316],_o2=m[4317],uo2=m[4318],vo2=m[4319],do2=m[4320],wo2=m[4321],yo2=m[4322],Po2=m[4323],po2=m[4324],go2=m[4325],zo2=m[4326],mo2=m[4327],xo2=m[4328],Eo2=m[4329],Io2=m[4330],Ro2=m[4331],Fo2=m[4332],jo2=m[4333],Co2=m[4334],ko2=m[4335],Lo2=m[4336],To2=m[4337],So2=m[4338],Mo2=m[4339],Bo2=m[4340],qo2=m[4341],Ao2=m[4342],Go2=m[4343],Oo2=m[4344],No2=m[4345],Do2=m[4346],$o2=m[4347],Qo2=m[4348],Uo2=m[4349],Yo2=m[4350],Ho2=m[4351],Ko2=m[4352],Wo2=m[4353],Jo2=m[4354],Vo2=m[4355],Zo2=m[4356],Xo2=m[4357],ba2=m[4358],la2=m[4359],ca2=m[4360],ia2=m[4361],ea2=m[4362],oa2=m[4363],aa2=m[4364],na2=m[4365],ha2=m[4366],fa2=m[4367],ta2=m[4368],ra2=m[4369],sa2=m[4370],_a2=m[4371],ua2=m[4372],va2=m[4373],da2=m[4374],wa2=m[4375],ya2=m[4376],Pa2=m[4377],pa2=m[4378],ga2=m[4379],za2=m[4380],ma2=m[4381],xa2=m[4382],Ea2=m[4383],Ia2=m[4384],Ra2=m[4385],Fa2=m[4386],ja2=m[4387],Ca2=m[4388],ka2=m[4389],La2=m[4390],Ta2=m[4391],Sa2=m[4392],Ma2=m[4393],Ba2=m[4394],qa2=m[4395],Aa2=m[4396],
Ga2=m[4397],Oa2=m[4398],Na2=m[4399],Da2=m[4400],$a2=m[4401],Qa2=m[4402],Ua2=m[4403],Ya2=m[4404],Ha2=m[4405],Ka2=m[4406],Wa2=m[4407],Ja2=m[4408],Va2=m[4409],Za2=m[4410],Xa2=m[4411],bn2=m[4412],ln2=m[4413],cn2=m[4414],in2=m[4415],en2=m[4416],on2=m[4417],an2=m[4418],nn2=m[4419],hn2=m[4420],fn2=m[4421],tn2=m[4422],rn2=m[4423],sn2=m[4424],_n2=m[4425],un2=m[4426],vn2=m[4427],dn2=m[4428],wn2=m[4429],yn2=m[4430],Pn2=m[4431],pn2=m[4432],gn2=m[4433],zn2=m[4434],mn2=m[4435],xn2=m[4436],En2=m[4437],In2=m[4438],Rn2=m[4439],Fn2=m[4440],jn2=m[4441],Cn2=m[4442],kn2=m[4443],Ln2=m[4444],Tn2=m[4445],Sn2=m[4446],Mn2=m[4447],Bn2=m[4448],qn2=m[4449],An2=m[4450],Gn2=m[4451],On2=m[4452],Nn2=m[4453],Dn2=m[4454],$n2=m[4455],Qn2=m[4456],Un2=m[4457],Yn2=m[4458],Hn2=m[4459],Kn2=m[4460],Wn2=m[4461],Jn2=m[4462],Vn2=m[4464],Zn2=m[4465],Xn2=m[4466],bh2=m[4467],lh2=m[4468],ch2=m[4469],ih2=m[4470],eh2=m[4471],oh2=m[4472],ah2=m[4473],nh2=m[4474],hh2=m[4475],fh2=m[4476],th2=m[4477],rh2=m[4478],sh2=m[4479],_h2=m[4480],uh2=m[4481],vh2=m[4482],dh2=m[4483],wh2=m[4484],yh2=m[4485],Ph2=m[4486],ph2=m[4487],gh2=m[4488],zh2=m[4489],mh2=m[4490],xh2=m[4491],Eh2=m[4492],Ih2=m[4493],Rh2=m[4494],Fh2=m[4495],jh2=m[4496],Ch2=m[4497],kh2=m[4498],Lh2=m[4499],Th2=m[4500],Sh2=m[4501],Mh2=m[4502],Bh2=m[4503],qh2=m[4504],Ah2=m[4505],Gh2=m[4506],Oh2=m[4507],Nh2=m[4508],Dh2=m[4509],$h2=m[4510],Qh2=m[4511],Uh2=m[4512],Yh2=m[4513],Hh2=m[4514],Kh2=m[4515],Wh2=m[4516],Jh2=m[4517],Vh2=m[4518],Zh2=m[4519],Xh2=m[4520],bf2=m[4521],lf2=m[4522],cf2=m[4523],if2=m[4524],ef2=m[4525],of2=m[4526],af2=m[4527],nf2=m[4528],hf2=m[4529],ff2=m[4530],tf2=m[4531],rf2=m[4532],sf2=m[4533],_f2=m[4534],uf2=m[4535],vf2=m[4536],df2=m[4537],wf2=m[4538],yf2=m[4539],Pf2=m[4540],pf2=m[4541],gf2=m[4542],zf2=m[4543],mf2=m[4544],xf2=m[4545],Ef2=m[4546],If2=m[4547],Rf2=m[4548],Ff2=m[4549],jf2=m[4550],Cf2=m[4551],kf2=m[4552],Lf2=m[4553],Tf2=m[4554],Sf2=m[4555],Mf2=m[4556],Bf2=m[4557],qf2=m[4558],Af2=m[4559],Gf2=m[4560],Of2=m[4561],Nf2=m[4562],Df2=m[4563],$f2=m[45
64],Qf2=m[4565],Uf2=m[4566],Yf2=m[4567],Hf2=m[4568],Kf2=m[4569],Wf2=m[4570],Jf2=m[4571],Vf2=m[4572],Zf2=m[4573],Xf2=m[4574],bt2=m[4575],lt2=m[4576],ct2=m[4577],it2=m[4578],et2=m[4579],ot2=m[4580],at2=m[4581],nt2=m[4582],ht2=m[4583],ft2=m[4584],tt2=m[4585],rt2=m[4586],st2=m[4587],_t2=m[4588],ut2=m[4589],vt2=m[4590],dt2=m[4591],wt2=m[4592],yt2=m[4593],Pt2=m[4594],pt2=m[4595],gt2=m[4596],zt2=m[4597],mt2=m[4598],xt2=m[4599],Et2=m[4600],It2=m[4601],Rt2=m[4602],Ft2=m[4603],jt2=m[4604],Ct2=m[4605],kt2=m[4606],Lt2=m[4607],Tt2=m[4608],St2=m[4609],Mt2=m[4610],Bt2=m[4611],qt2=m[4613],At2=m[4614],Gt2=m[4615],Ot2=m[4616],Nt2=m[4617],Dt2=m[4618],$t2=m[4619],Qt2=m[4620],Ut2=m[4621],Yt2=m[4622],Ht2=m[4623],Kt2=m[4624],Wt2=m[4625],Jt2=m[4626],Vt2=m[4627],Zt2=m[4628],Xt2=m[4629],br2=m[4630],lr2=m[4631],cr2=m[4632],ir2=m[4633],er2=m[4634],or2=m[4635],ar2=m[4636],nr2=m[4637],hr2=m[4638],fr2=m[4639],tr2=m[4640],rr2=m[4641],sr2=m[4642],_r2=m[4643],ur2=m[4644],vr2=m[4645],dr2=m[4646],wr2=m[4647],yr2=m[4648],Pr2=m[4649],pr2=m[4650],gr2=m[4651],zr2=m[4652],mr2=m[4653],xr2=m[4654],Er2=m[4655],Ir2=m[4656],Rr2=m[4658],Fr2=m[4659],jr2=m[4660],Cr2=m[4661],kr2=m[4662],Lr2=m[4663],Tr2=m[4664],Sr2=m[4665],Mr2=m[4666],Br2=m[4667],qr2=m[4668],Ar2=m[4669],Gr2=m[4670],Or2=m[4671],Nr2=m[4672],Dr2=m[4673],$r2=m[4674],Qr2=m[4675],Ur2=m[4676],Yr2=m[4677],Hr2=m[4678],Kr2=m[4679],Wr2=m[4680],Jr2=m[4681],Vr2=m[4682],Zr2=m[4683],Xr2=m[4684],bs2=m[4685],ls2=m[4686],cs2=m[4687],is2=m[4688],es2=m[4689],os2=m[4690],as2=m[4691],ns2=m[4692],hs2=m[4693],fs2=m[4694],ts2=m[4695],rs2=m[4696],ss2=m[4697],_s2=m[4698],us2=m[4699],vs2=m[4700],ds2=m[4701],ws2=m[4702],ys2=m[4703],Ps2=m[4704],ps2=m[4705],gs2=m[4706],zs2=m[4707],ms2=m[4708],xs2=m[4709],Es2=m[4710],Is2=m[4711],Rs2=m[4712],Fs2=m[4713],js2=m[4714],Cs2=m[4715],ks2=m[4716],Ls2=m[4717],Ts2=m[4718],Ss2=m[4719],Ms2=m[4720],Bs2=m[4721],qs2=m[4722],As2=m[4723],Gs2=m[4724],Os2=m[4725],Ns2=m[4726],Ds2=m[4727],$s2=m[4728],Qs2=m[4729],Us2=m[4730],Ys2=m[4731],Hs2=m[4732],Ks2=
m[4733],Ws2=m[4734],Js2=m[4735],Vs2=m[4736],Zs2=m[4737],Xs2=m[4738],b_2=m[4739],l_2=m[4740],c_2=m[4741],i_2=m[4742],e_2=m[4743],o_2=m[4744],a_2=m[4745],n_2=m[4746],h_2=m[4747],f_2=m[4748],t_2=m[4749],r_2=m[4750],s_2=m[4751],__2=m[4752],u_2=m[4753],v_2=m[4754],d_2=m[4755],w_2=m[4756],y_2=m[4757],P_2=m[4758],p_2=m[4759],g_2=m[4760],z_2=m[4761],m_2=m[4762],x_2=m[4763],E_2=m[4764],I_2=m[4765],R_2=m[4766],F_2=m[4767],j_2=m[4768],C_2=m[4769],k_2=m[4770],L_2=m[4771],T_2=m[4772],S_2=m[4773],M_2=m[4774],B_2=m[4775],q_2=m[4776],A_2=m[4777],G_2=m[4778],O_2=m[4779],N_2=m[4780],D_2=m[4781],$_2=m[4782],Q_2=m[4783],U_2=m[4784],Y_2=m[4785],H_2=m[4786],K_2=m[4787],W_2=m[4788],J_2=m[4789],V_2=m[4790],Z_2=m[4791],X_2=m[4792],bu2=m[4793],lu2=m[4794],cu2=m[4795],iu2=m[4796],eu2=m[4797],ou2=m[4798],au2=m[4799],nu2=m[4800],hu2=m[4801],fu2=m[4802],tu2=m[4803],ru2=m[4804],su2=m[4805],_u2=m[4806],uu2=m[4807],vu2=m[4808],du2=m[4809],wu2=m[4810],yu2=m[4811],Pu2=m[4812],pu2=m[4813],gu2=m[4814],zu2=m[4815],mu2=m[4816],xu2=m[4817],Eu2=m[4818],Iu2=m[4819],Ru2=m[4820],Fu2=m[4821],ju2=m[4822],Cu2=m[4823],ku2=m[4824],Lu2=m[4825],Tu2=m[4826],Su2=m[4827],Mu2=m[4828],Bu2=m[4829],qu2=m[4830],Au2=m[4831],Gu2=m[4832],Ou2=m[4833],Nu2=m[4834],Du2=m[4835],$u2=m[4836],Qu2=m[4837],Uu2=m[4838],Yu2=m[4839],Hu2=m[4840],Ku2=m[4841],Wu2=m[4842],Ju2=m[4843],Vu2=m[4844],Zu2=m[4845],Xu2=m[4846],bv2=m[4847],lv2=m[4848],cv2=m[4849],iv2=m[4850],ev2=m[4851],ov2=m[4852],av2=m[4853],nv2=m[4854],hv2=m[4855],fv2=m[4856],tv2=m[4857],rv2=m[4858],sv2=m[4859],_v2=m[4860],uv2=m[4861],vv2=m[4862],dv2=m[4863],wv2=m[4864],yv2=m[4865],Pv2=m[4866],pv2=m[4867],gv2=m[4868],zv2=m[4869],mv2=m[4870],xv2=m[4871],Ev2=m[4872],Iv2=m[4873],Rv2=m[4874],Fv2=m[4875],jv2=m[4876],Cv2=m[4877],kv2=m[4878],Lv2=m[4879],Tv2=m[4880],Sv2=m[4881],Mv2=m[4882],Bv2=m[4883],qv2=m[4884],Av2=m[4885],Gv2=m[4886],Ov2=m[4887],Nv2=m[4888],Dv2=m[4889],$v2=m[4890],Qv2=m[4891],Uv2=m[4892],Yv2=m[4893],Hv2=m[4894],Kv2=m[4895],Wv2=m[4896],Jv2=m[4897],Vv2=m[4898],Zv2=m[4899],
Xv2=m[4900],bd2=m[4901],ld2=m[4902],cd2=m[4903],id2=m[4904],ed2=m[4905],od2=m[4906],ad2=m[4907],nd2=m[4908],hd2=m[4909],fd2=m[4910],td2=m[4911],rd2=m[4912],sd2=m[4913],_d2=m[4914],ud2=m[4915],vd2=m[4916],dd2=m[4917],wd2=m[4918],yd2=m[4919],Pd2=m[4920],pd2=m[4921],gd2=m[4922],zd2=m[4923],md2=m[4924],xd2=m[4925],Ed2=m[4926],Id2=m[4927],Rd2=m[4928],Fd2=m[4929],jd2=m[4930],Cd2=m[4931],kd2=m[4932],Ld2=m[4933],Td2=m[4934],Sd2=m[4935],Md2=m[4936],Bd2=m[4937],qd2=m[4938],Ad2=m[4939],Gd2=m[4940],Od2=m[4941],Nd2=m[4942],Dd2=m[4943],$d2=m[4944],Qd2=m[4945],Ud2=m[4946],Yd2=m[4947],Hd2=m[4948],Kd2=m[4949],Wd2=m[4950],Jd2=m[4951],Vd2=m[4952],Zd2=m[4953],Xd2=m[4954],bw2=m[4955],lw2=m[4956],cw2=m[4957],iw2=m[4958],ew2=m[4959],ow2=m[4960],aw2=m[4961],nw2=m[4962],hw2=m[4963],fw2=m[4964],tw2=m[4965],rw2=m[4966],sw2=m[4967],_w2=m[4968],uw2=m[4969],vw2=m[4970],dw2=m[4971],ww2=m[4972],yw2=m[4973],Pw2=m[4974],pw2=m[4975],gw2=m[4976],zw2=m[4977],mw2=m[4978],xw2=m[4979],Ew2=m[4980],Iw2=m[4981],Rw2=m[4982],Fw2=m[4983],jw2=m[4984],Cw2=m[4985],kw2=m[4986],Lw2=m[4987],Tw2=m[4988],Sw2=m[4989],Mw2=m[4990],Bw2=m[4991],qw2=m[4992],Aw2=m[4993],Gw2=m[4994],Ow2=m[4995],Nw2=m[4996],Dw2=m[4997],$w2=m[4998],Qw2=m[4999],Uw2=m[5e3],Yw2=m[5001],Hw2=m[5002],Kw2=m[5003],Ww2=m[5004],Jw2=m[5005],Vw2=m[5006],Zw2=m[5007],Xw2=m[5008],by2=m[5009],ly2=m[5010],cy2=m[5011],iy2=m[5012],ey2=m[5013],oy2=m[5014],ay2=m[5015],ny2=m[5016],hy2=m[5017],fy2=m[5018],ty2=m[5019],ry2=m[5020],sy2=m[5021],_y2=m[5022],uy2=m[5023],vy2=m[5024],dy2=m[5025],wy2=m[5026],yy2=m[5027],Py2=m[5028],py2=m[5029],gy2=m[5030],zy2=m[5031],my2=m[5032],xy2=m[5033],Ey2=m[5034],Iy2=m[5035],Ry2=m[5036],Fy2=m[5037],jy2=m[5038],Cy2=m[5039],ky2=m[5040],Ly2=m[5041],Ty2=m[5042],Sy2=m[5043],My2=m[5044],By2=m[5045],qy2=m[5046],Ay2=m[5047],Gy2=m[5048],Oy2=m[5049],Ny2=m[5050],Dy2=m[5051],$y2=m[5052],Qy2=m[5053],Uy2=m[5054],Yy2=m[5055],Hy2=m[5056],Ky2=m[5057],Wy2=m[5058],Jy2=m[5059],Vy2=m[5060],Zy2=m[5061],Xy2=m[5062],bP2=m[5063],lP2=m[5064],cP2=m[5065],iP2=m[506
6],eP2=m[5067],oP2=m[5068],aP2=m[5069],nP2=m[5070],hP2=m[5071],fP2=m[5072],tP2=m[5073],rP2=m[5074],sP2=m[5075],_P2=m[5076],uP2=m[5077],vP2=m[5078],dP2=m[5080],wP2=m[5081],yP2=m[5082],PP2=m[5083],pP2=m[5084],gP2=m[5085],zP2=m[5086],mP2=m[5087],xP2=m[5088],EP2=m[5089],IP2=m[5090],RP2=m[5091],FP2=m[5092],jP2=m[5093],CP2=m[5094],kP2=m[5095],LP2=m[5096],TP2=m[5097],SP2=m[5098],MP2=m[5099],BP2=m[5100],qP2=m[5101],AP2=m[5102],GP2=m[5103],OP2=m[5104],NP2=m[5105],DP2=m[5106],$P2=m[5107],QP2=m[5108],UP2=m[5109],YP2=m[5110],HP2=m[5111],KP2=m[5112],WP2=m[5113],JP2=m[5114],VP2=m[5115],ZP2=m[5116],XP2=m[5117],bp2=m[5118],lp2=m[5119],cp2=m[5120],ip2=m[5121],ep2=m[5122],op2=m[5123],ap2=m[5124],np2=m[5125],hp2=m[5126],fp2=m[5127],tp2=m[5128],rp2=m[5129],sp2=m[5130],_p2=m[5131],up2=m[5132],vp2=m[5133],dp2=m[5134],wp2=m[5135],yp2=m[5136],Pp2=m[5137],pp2=m[5138],gp2=m[5139],zp2=m[5140],mp2=m[5141],xp2=m[5142],Ep2=m[5143],Ip2=m[5144],Rp2=m[5145],Fp2=m[5146],jp2=m[5147],Cp2=m[5148],kp2=m[5149],Lp2=m[5150],Tp2=m[5151],Sp2=m[5152],Mp2=m[5153],Bp2=m[5154],qp2=m[5155],Ap2=m[5156],Gp2=m[5157],Op2=m[5158],Np2=m[5159],Dp2=m[5160],$p2=m[5161],Qp2=m[5162],Up2=m[5163],Yp2=m[5164],Hp2=m[5165],Kp2=m[5166],Wp2=m[5167],Jp2=m[5168],Vp2=m[5169],Zp2=m[5170],Xp2=m[5171],bg2=m[5172],lg2=m[5173],cg2=m[5174],ig2=m[5175],eg2=m[5176],og2=m[5177],ag2=m[5178],ng2=m[5179],hg2=m[5180],fg2=m[5181],tg2=m[5182],rg2=m[5183],sg2=m[5184],_g2=m[5185],ug2=m[5186],vg2=m[5187],dg2=m[5188],wg2=m[5189],yg2=m[5190],Pg2=m[5191],pg2=m[5192],gg2=m[5193],zg2=m[5194],mg2=m[5195],xg2=m[5196],Eg2=m[5197],Ig2=m[5198],Rg2=m[5199],Fg2=m[5200],jg2=m[5201],Cg2=m[5202],kg2=m[5203],Lg2=m[5204],Tg2=m[5205],Sg2=m[5206],Mg2=m[5207],Bg2=m[5208],qg2=m[5209],Ag2=m[5210],Gg2=m[5211],Og2=m[5212],Ng2=m[5213],Dg2=m[5214],$g2=m[5215],Qg2=m[5216],Ug2=m[5217],Yg2=m[5218],Hg2=m[5219],Kg2=m[5220],Wg2=m[5221],Jg2=m[5222],Vg2=m[5223],Zg2=m[5224],Xg2=m[5225],bz2=m[5226],lz2=m[5227],cz2=m[5228],iz2=m[5229],ez2=m[5230],oz2=m[5231],az2=m[5232],nz2=m[5233],hz2=m
[5234],fz2=m[5235],tz2=m[5236],rz2=m[5237],sz2=m[5238],_z2=m[5239],uz2=m[5240],vz2=m[5241],dz2=m[5242],wz2=m[5243],yz2=m[5244],Pz2=m[5245],pz2=m[5246],gz2=m[5247],zz2=m[5248],mz2=m[5249],xz2=m[5250],Ez2=m[5251],Iz2=m[5252],Rz2=m[5253],Fz2=m[5254],jz2=m[5255],Cz2=m[5256],kz2=m[5257],Lz2=m[5258],Tz2=m[5259],Sz2=m[5260],Mz2=m[5261],Bz2=m[5262],qz2=m[5263],Az2=m[5264],Gz2=m[5265],Oz2=m[5266],Nz2=m[5267],Dz2=m[5268],$z2=m[5269],Qz2=m[5270],Uz2=m[5271],Yz2=m[5272],Hz2=m[5273],Kz2=m[5274],Wz2=m[5275],Jz2=m[5276],Vz2=m[5277],Zz2=m[5278],Xz2=m[5279],bm2=m[5280],lm2=m[5281],cm2=m[5282],im2=m[5283],em2=m[5284],om2=m[5285],am2=m[5286],nm2=m[5287],hm2=m[5288],fm2=m[5289],tm2=m[5290],rm2=m[5291],sm2=m[5292],_m2=m[5293],um2=m[5294],vm2=m[5295],dm2=m[5296],wm2=m[5297],ym2=m[5298],Pm2=m[5299],pm2=m[5300],gm2=m[5301],zm2=m[5302],mm2=m[5303],xm2=m[5304],Em2=m[5305],Im2=m[5306],Rm2=m[5307],Fm2=m[5308],jm2=m[5309],Cm2=m[5310],km2=m[5311],Lm2=m[5312],Tm2=m[5313],Sm2=m[5314],Mm2=m[5315],Bm2=m[5316],qm2=m[5317],Am2=m[5318],Gm2=m[5319],Om2=m[5320],Nm2=m[5321],Dm2=m[5322],$m2=m[5323],Qm2=m[5324],Um2=m[5325],Ym2=m[5326],Hm2=m[5327],Km2=m[5328],Wm2=m[5329],Jm2=m[5330],Vm2=m[5331],Zm2=m[5332],Xm2=m[5333],bx2=m[5334],lx2=m[5335],cx2=m[5336],ix2=m[5337],ex2=m[5338],ox2=m[5339],ax2=m[5340],nx2=m[5341],hx2=m[5342],fx2=m[5343],tx2=m[5344],rx2=m[5345],sx2=m[5346],_x2=m[5347],ux2=m[5348],vx2=m[5349],dx2=m[5350],wx2=m[5351],yx2=m[5352],Px2=m[5353],px2=m[5354],gx2=m[5355],zx2=m[5356],mx2=m[5357],xx2=m[5358],Ex2=m[5359],Ix2=m[5360],Rx2=m[5361],Fx2=m[5362],jx2=m[5363],Cx2=m[5364],kx2=m[5365],Lx2=m[5366],Tx2=m[5367],Sx2=m[5368],Mx2=m[5369],Bx2=m[5370],qx2=m[5371],Ax2=m[5372],Gx2=m[5373],Ox2=m[5374],Nx2=m[5375],Dx2=m[5376],$x2=m[5377],Qx2=m[5378],Ux2=m[5379],Yx2=m[5380],Hx2=m[5381],Kx2=m[5382],Wx2=m[5383],Jx2=m[5384],Vx2=m[5385],Zx2=m[5386],Xx2=m[5387],bE2=m[5388],lE2=m[5389],cE2=m[5390],iE2=m[5391],eE2=m[5392],oE2=m[5393],aE2=m[5394],nE2=m[5395],hE2=m[5396],fE2=m[5397],tE2=m[5398],rE2=m[5399],sE2=m[5400],_
E2=m[5401],uE2=m[5402],vE2=m[5403],dE2=m[5405],wE2=m[5406],yE2=m[5407],PE2=m[5408],pE2=m[5409],gE2=m[5410],zE2=m[5411],mE2=m[5412],xE2=m[5413],EE2=m[5414],IE2=m[5415],RE2=m[5416],FE2=m[5417],jE2=m[5418],CE2=m[5419],kE2=m[5420],LE2=m[5421],TE2=m[5422],SE2=m[5423],ME2=m[5424],BE2=m[5425],qE2=m[5426],AE2=m[5427],GE2=m[5428],OE2=m[5429],NE2=m[5430],DE2=m[5431],$E2=m[5432],QE2=m[5433],UE2=m[5434],YE2=m[5435],HE2=m[5436],KE2=m[5437],WE2=m[5438],JE2=m[5439],VE2=m[5440],ZE2=m[5441],XE2=m[5442],bI2=m[5443],lI2=m[5444],cI2=m[5445],iI2=m[5446],eI2=m[5447],oI2=m[5448],aI2=m[5449],nI2=m[5450],hI2=m[5451],fI2=m[5452],tI2=m[5453],rI2=m[5454],sI2=m[5455],_I2=m[5456],uI2=m[5457],vI2=m[5458],dI2=m[5459],wI2=m[5460],yI2=m[5461],PI2=m[5462],pI2=m[5463],gI2=m[5464],zI2=m[5465],mI2=m[5466],xI2=m[5467],EI2=m[5468],II2=m[5469],RI2=m[5470],FI2=m[5471],jI2=m[5472],CI2=m[5473],kI2=m[5474],LI2=m[5475],TI2=m[5476],SI2=m[5477],MI2=m[5478],BI2=m[5479],qI2=m[5480],AI2=m[5481],GI2=m[5482],OI2=m[5483],NI2=m[5484],DI2=m[5485],$I2=m[5486],QI2=m[5487],UI2=m[5488],YI2=m[5489],HI2=m[5490],KI2=m[5491],WI2=m[5492],JI2=m[5493],VI2=m[5494],ZI2=m[5495],XI2=m[5496],bR2=m[5497],lR2=m[5498],cR2=m[5499],iR2=m[5500],eR2=m[5501],oR2=m[5502],aR2=m[5503],nR2=m[5504],hR2=m[5505],fR2=m[5506],tR2=m[5507],rR2=m[5508],sR2=m[5509],_R2=m[5510],uR2=m[5511],vR2=m[5512],dR2=m[5513],wR2=m[5514],yR2=m[5515],PR2=m[5516],pR2=m[5517],gR2=m[5518],zR2=m[5519],mR2=m[5520],xR2=m[5521],ER2=m[5522],IR2=m[5523],RR2=m[5524],FR2=m[5525],jR2=m[5526],CR2=m[5527],kR2=m[5528],LR2=m[5529],TR2=m[5530],SR2=m[5531],MR2=m[5532],BR2=m[5533],qR2=m[5534],AR2=m[5535],GR2=m[5536],OR2=m[5537],NR2=m[5538],DR2=m[5539],$R2=m[5540],QR2=m[5541],UR2=m[5542],YR2=m[5543],HR2=m[5544],KR2=m[5545],WR2=m[5546],JR2=m[5547],VR2=m[5548],ZR2=m[5549],XR2=m[5550],bF2=m[5551],lF2=m[5552],cF2=m[5553],iF2=m[5554],eF2=m[5555],oF2=m[5556],aF2=m[5557],nF2=m[5558],hF2=m[5559],fF2=m[5560],tF2=m[5561],rF2=m[5562],sF2=m[5563],_F2=m[5564],uF2=m[5565],vF2=m[5566],dF2=m[5567],wF2=m[556
8],yF2=m[5569],PF2=m[5570],pF2=m[5571],gF2=m[5572],zF2=m[5573],mF2=m[5574],xF2=m[5575],EF2=m[5576],IF2=m[5577],RF2=m[5578],FF2=m[5579],jF2=m[5580],CF2=m[5581],kF2=m[5582],LF2=m[5583],TF2=m[5584],SF2=m[5585],MF2=m[5586],BF2=m[5587],qF2=m[5588],AF2=m[5589],GF2=m[5590],OF2=m[5591],NF2=m[5592],DF2=m[5593],$F2=m[5594],QF2=m[5595],UF2=m[5596],YF2=m[5597],HF2=m[5598],KF2=m[5599],WF2=m[5600],JF2=m[5601],VF2=m[5602],ZF2=m[5603],XF2=m[5604],bj2=m[5605],lj2=m[5606],cj2=m[5607],ij2=m[5608],ej2=m[5609],oj2=m[5610],aj2=m[5611],nj2=m[5612],hj2=m[5613],fj2=m[5614],tj2=m[5615],rj2=m[5616],sj2=m[5617],_j2=m[5618],uj2=m[5619],vj2=m[5620],dj2=m[5621],wj2=m[5622],yj2=m[5623],Pj2=m[5624],pj2=m[5625],gj2=m[5626],zj2=m[5627],mj2=m[5628],xj2=m[5629],Ej2=m[5630],Ij2=m[5631],Rj2=m[5632],Fj2=m[5633],jj2=m[5634],Cj2=m[5635],kj2=m[5636],Lj2=m[5637],Tj2=m[5638],Sj2=m[5639],Mj2=m[5640],Bj2=m[5641],qj2=m[5642],Aj2=m[5643],Gj2=m[5644],Oj2=m[5645],Nj2=m[5646],Dj2=m[5647],$j2=m[5648],Qj2=m[5649],Uj2=m[5650],Yj2=m[5651],Hj2=m[5652],Kj2=m[5653],Wj2=m[5654],Jj2=m[5655],Vj2=m[5656],Zj2=m[5657],Xj2=m[5658],bC2=m[5659],lC2=m[5660],cC2=m[5661],iC2=m[5662],eC2=m[5663],oC2=m[5664],aC2=m[5665],nC2=m[5666],hC2=m[5667],fC2=m[5668],tC2=m[5669],rC2=m[5670],sC2=m[5671],_C2=m[5672],uC2=m[5673],vC2=m[5674],dC2=m[5675],wC2=m[5676],yC2=m[5677],PC2=m[5678],pC2=m[5679],gC2=m[5680],zC2=m[5681],mC2=m[5682],xC2=m[5683],EC2=m[5684],IC2=m[5685],RC2=m[5686],FC2=m[5687],jC2=m[5688],CC2=m[5689],kC2=m[5690],LC2=m[5691],TC2=m[5692],SC2=m[5693],MC2=m[5694],BC2=m[5695],qC2=m[5696],AC2=m[5697],GC2=m[5698],OC2=m[5699],NC2=m[5700],DC2=m[5701],$C2=m[5702],QC2=m[5703],UC2=m[5704],YC2=m[5705],HC2=m[5706],KC2=m[5707],WC2=m[5708],JC2=m[5709],VC2=m[5710],ZC2=m[5711],XC2=m[5712],bk2=m[5713],lk2=m[5714],ck2=m[5715],ik2=m[5716],ek2=m[5717],ok2=m[5718],ak2=m[5719],nk2=m[5720],hk2=m[5721],fk2=m[5722],tk2=m[5723],rk2=m[5724],sk2=m[5725],_k2=m[5726],uk2=m[5727],vk2=m[5728],dk2=m[5729],wk2=m[5730],yk2=m[5731],Pk2=m[5732],pk2=m[5733],gk2=m[5734],zk2=m
[5735],mk2=m[5736],xk2=m[5737],Ek2=m[5738],Ik2=m[5739],Rk2=m[5740],Fk2=m[5741],jk2=m[5742],Ck2=m[5743],kk2=m[5744],Lk2=m[5745],Tk2=m[5746],Sk2=m[5747],Mk2=m[5748],Bk2=m[5749],qk2=m[5750],Ak2=m[5751],Gk2=m[5752],Ok2=m[5753],Nk2=m[5754],Dk2=m[5755],$k2=m[5756],Qk2=m[5757],Uk2=m[5758],Yk2=m[5759],Hk2=m[5760],Kk2=m[5761],Wk2=m[5762],Jk2=m[5763],Vk2=m[5764],Zk2=m[5765],Xk2=m[5766],bL2=m[5767],lL2=m[5768],cL2=m[5769],iL2=m[5770],eL2=m[5771],oL2=m[5772],aL2=m[5773],nL2=m[5774],hL2=m[5775],fL2=m[5776],tL2=m[5777],rL2=m[5778],sL2=m[5779],_L2=m[5780],uL2=m[5781],vL2=m[5782],dL2=m[5783],wL2=m[5784],yL2=m[5785],PL2=m[5786],pL2=m[5787],gL2=m[5788],zL2=m[5789],mL2=m[5790],xL2=m[5791],EL2=m[5792],IL2=m[5793],RL2=m[5794],FL2=m[5795],jL2=m[5796],CL2=m[5797],kL2=m[5798],LL2=m[5799],TL2=m[5800],SL2=m[5801],ML2=m[5802],BL2=m[5803],qL2=m[5804],AL2=m[5805],GL2=m[5806],OL2=m[5807],NL2=m[5808],DL2=m[5809],$L2=m[5810],QL2=m[5811],UL2=m[5812],YL2=m[5813],HL2=m[5814],KL2=m[5815],WL2=m[5816],JL2=m[5817],VL2=m[5818],ZL2=m[5819],XL2=m[5820],bT2=m[5821],lT2=m[5822],cT2=m[5823],iT2=m[5824],eT2=m[5825],oT2=m[5826],aT2=m[5827],nT2=m[5828],hT2=m[5829],fT2=m[5830],tT2=m[5831],rT2=m[5832],sT2=m[5833],_T2=m[5834],uT2=m[5835],vT2=m[5836],dT2=m[5837],wT2=m[5838],yT2=m[5839],PT2=m[5840],pT2=m[5841],gT2=m[5842],zT2=m[5843],mT2=m[5844],xT2=m[5845],ET2=m[5846],IT2=m[5847],RT2=m[5848],FT2=m[5849],jT2=m[5850],CT2=m[5851],kT2=m[5852],LT2=m[5853],TT2=m[5854],ST2=m[5855],MT2=m[5856],BT2=m[5857],qT2=m[5858],AT2=m[5859],GT2=m[5860],OT2=m[5861],NT2=m[5862],DT2=m[5863],$T2=m[5864],QT2=m[5865],UT2=m[5866],YT2=m[5867],HT2=m[5868],KT2=m[5869],WT2=m[5870],JT2=m[5871],VT2=m[5872],ZT2=m[5873],XT2=m[5874],bS2=m[5875],lS2=m[5876],cS2=m[5877],iS2=m[5878],eS2=m[5879],oS2=m[5880],aS2=m[5881],nS2=m[5882],hS2=m[5883],fS2=m[5884],tS2=m[5885],rS2=m[5886],sS2=m[5887],_S2=m[5888],uS2=m[5889],vS2=m[5890],dS2=m[5891],wS2=m[5892],yS2=m[5893],PS2=m[5894],pS2=m[5895],gS2=m[5896],zS2=m[5897],mS2=m[5898],xS2=m[5899],ES2=m[5900],IS2=m[5901],R
S2=m[5902],FS2=m[5903],jS2=m[5904],CS2=m[5905],kS2=m[5906],LS2=m[5907],TS2=m[5908],SS2=m[5909],MS2=m[5910],BS2=m[5911],qS2=m[5912],AS2=m[5913],GS2=m[5914],OS2=m[5915],NS2=m[5916],DS2=m[5917],$S2=m[5918],QS2=m[5919],US2=m[5920],YS2=m[5921],HS2=m[5922],KS2=m[5923],WS2=m[5924],JS2=m[5925],VS2=m[5926],ZS2=m[5927],XS2=m[5928],bM2=m[5929],lM2=m[5930],cM2=m[5931],iM2=m[5932],eM2=m[5933],oM2=m[5934],aM2=m[5935],nM2=m[5936],hM2=m[5937],fM2=m[5939],tM2=m[5940],rM2=m[5941],sM2=m[5942],_M2=m[5943],uM2=m[5944],vM2=m[5945],dM2=m[5946],wM2=m[5947],yM2=m[5948],PM2=m[5949],pM2=m[5950],gM2=m[5951],zM2=m[5952],mM2=m[5953],xM2=m[5954],EM2=m[5955],IM2=m[5956],RM2=m[5957],FM2=m[5958],jM2=m[5959],CM2=m[5960],kM2=m[5961],LM2=m[5962],TM2=m[5963],SM2=m[5964],MM2=m[5965],BM2=m[5966],qM2=m[5967],AM2=m[5968],GM2=m[5969],OM2=m[5970],NM2=m[5971],DM2=m[5972],$M2=m[5973],QM2=m[5974],UM2=m[5975],YM2=m[5976],HM2=m[5977],KM2=m[5978],WM2=m[5979],JM2=m[5980],VM2=m[5981],ZM2=m[5982],XM2=m[5983],bB2=m[5984],lB2=m[5985],cB2=m[5986],iB2=m[5987],eB2=m[5988],oB2=m[5989],aB2=m[5990],nB2=m[5991],hB2=m[5992],fB2=m[5993],tB2=m[5994],rB2=m[5995],sB2=m[5996],_B2=m[5997],uB2=m[5998],vB2=m[5999],dB2=m[6e3],wB2=m[6001],yB2=m[6002],PB2=m[6003],pB2=m[6004],gB2=m[6005],zB2=m[6006],mB2=m[6007],xB2=m[6008],EB2=m[6009],IB2=m[6010],RB2=m[6011],FB2=m[6012],jB2=m[6013],CB2=m[6014],kB2=m[6015],LB2=m[6016],TB2=m[6017],SB2=m[6018],MB2=m[6019],BB2=m[6020],qB2=m[6021],AB2=m[6022],GB2=m[6023],OB2=m[6024],NB2=m[6025],DB2=m[6026],$B2=m[6027],QB2=m[6028],UB2=m[6029],YB2=m[6030],HB2=m[6031],KB2=m[6032],WB2=m[6033],JB2=m[6034],VB2=m[6035],ZB2=m[6036],XB2=m[6037],bq2=m[6038],lq2=m[6039],cq2=m[6040],iq2=m[6041],eq2=m[6042],oq2=m[6043],aq2=m[6044],nq2=m[6045],hq2=m[6046],fq2=m[6047],tq2=m[6048],rq2=m[6049],sq2=m[6050],_q2=m[6051],uq2=m[6052],vq2=m[6053],dq2=m[6054],wq2=m[6055],yq2=m[6056],Pq2=m[6057],pq2=m[6058],gq2=m[6059],zq2=m[6060],mq2=m[6061],xq2=m[6062],Eq2=m[6063],Iq2=m[6064],Rq2=m[6065],Fq2=m[6066],jq2=m[6067],Cq2=m[6068],kq2=m[6069
],Lq2=m[6070],Tq2=m[6071],Sq2=m[6072],Mq2=m[6073],Bq2=m[6074],qq2=m[6075],Aq2=m[6076],Gq2=m[6077],Oq2=m[6078],Nq2=m[6079],Dq2=m[6080],$q2=m[6081],Qq2=m[6082],Uq2=m[6083],Yq2=m[6084],Hq2=m[6085],Kq2=m[6086],Wq2=m[6087],Jq2=m[6088],Vq2=m[6089],Zq2=m[6090],Xq2=m[6091],bA2=m[6092],lA2=m[6093],cA2=m[6094],iA2=m[6095],eA2=m[6096],oA2=m[6097],aA2=m[6098],nA2=m[6099],hA2=m[6100],fA2=m[6101],tA2=m[6102],rA2=m[6103],sA2=m[6104],_A2=m[6105],uA2=m[6106],vA2=m[6107],dA2=m[6108],wA2=m[6109],yA2=m[6110],PA2=m[6111],pA2=m[6112],gA2=m[6113],zA2=m[6114],mA2=m[6115],xA2=m[6116],EA2=m[6117],IA2=m[6118],RA2=m[6119],FA2=m[6120],jA2=m[6121],CA2=m[6122],kA2=m[6123],LA2=m[6124],TA2=m[6125],SA2=m[6126],MA2=m[6127],BA2=m[6128],qA2=m[6129],AA2=m[6130],GA2=m[6131],OA2=m[6132],NA2=m[6133],DA2=m[6134],$A2=m[6135],QA2=m[6136],UA2=m[6137],YA2=m[6138],HA2=m[6139],KA2=m[6140],WA2=m[6141],JA2=m[6142],VA2=m[6143],ZA2=m[6144],XA2=m[6145],bG2=m[6146],lG2=m[6147],cG2=m[6148],iG2=m[6149],eG2=m[6150],oG2=m[6151],aG2=m[6152],nG2=m[6153],hG2=m[6154],fG2=m[6155],tG2=m[6156],rG2=m[6157],sG2=m[6158],_G2=m[6159],uG2=m[6160],vG2=m[6161],dG2=m[6162],wG2=m[6163],yG2=m[6164],PG2=m[6165],pG2=m[6166],gG2=m[6167],zG2=m[6168],mG2=m[6169],xG2=m[6170],EG2=m[6171],IG2=m[6172],RG2=m[6173],FG2=m[6174],jG2=m[6175],CG2=m[6176],kG2=m[6177],LG2=m[6178],TG2=m[6179],SG2=m[6180],MG2=m[6181],BG2=m[6182],qG2=m[6183],AG2=m[6184],GG2=m[6185],OG2=m[6186],NG2=m[6187],DG2=m[6188],$G2=m[6189],QG2=m[6190],UG2=m[6191],YG2=m[6192],HG2=m[6193],KG2=m[6194],WG2=m[6195],JG2=m[6196],VG2=m[6197],ZG2=m[6198],XG2=m[6199],bO2=m[6200],lO2=m[6201],cO2=m[6202],iO2=m[6203],eO2=m[6204],oO2=m[6205],aO2=m[6206],nO2=m[6207],hO2=m[6208],fO2=m[6209],tO2=m[6210],rO2=m[6211],sO2=m[6212],_O2=m[6213],uO2=m[6214],vO2=m[6215],dO2=m[6216],wO2=m[6217],yO2=m[6218],PO2=m[6219],pO2=m[6220],gO2=m[6221],zO2=m[6222],mO2=m[6223],xO2=m[6224],EO2=m[6225],IO2=m[6226],RO2=m[6227],FO2=m[6228],jO2=m[6229],CO2=m[6230],kO2=m[6231],LO2=m[6232],TO2=m[6233],SO2=m[6234],MO2=m[6235],BO2=m[
6236],qO2=m[6237],AO2=m[6238],GO2=m[6239],OO2=m[6240],NO2=m[6241],DO2=m[6242],$O2=m[6243],QO2=m[6244],UO2=m[6245],YO2=m[6246],HO2=m[6247],KO2=m[6248],WO2=m[6249],JO2=m[6250],VO2=m[6251],ZO2=m[6252],XO2=m[6253],bN2=m[6254],lN2=m[6255],cN2=m[6256],iN2=m[6257],eN2=m[6258],oN2=m[6259],aN2=m[6260],nN2=m[6261],hN2=m[6262],fN2=m[6263],tN2=m[6264],rN2=m[6265],sN2=m[6266],_N2=m[6267],uN2=m[6268],vN2=m[6269],dN2=m[6270],wN2=m[6271],yN2=m[6272],PN2=m[6273],pN2=m[6274],gN2=m[6275],zN2=m[6276],mN2=m[6277],xN2=m[6278],EN2=m[6279],IN2=m[6280],RN2=m[6281],FN2=m[6282],jN2=m[6283],CN2=m[6284],kN2=m[6285],LN2=m[6286],TN2=m[6287],SN2=m[6288],MN2=m[6289],BN2=m[6290],qN2=m[6291],AN2=m[6292],GN2=m[6293],ON2=m[6294],NN2=m[6295],DN2=m[6296],$N2=m[6297],QN2=m[6298],UN2=m[6299],YN2=m[6300],HN2=m[6301],KN2=m[6302],WN2=m[6303],JN2=m[6304],VN2=m[6305],ZN2=m[6306],XN2=m[6307],bD2=m[6308],lD2=m[6309],cD2=m[6310],iD2=m[6311],eD2=m[6312],oD2=m[6313],aD2=m[6314],nD2=m[6315],hD2=m[6316],fD2=m[6317],tD2=m[6318],rD2=m[6319],sD2=m[6320],_D2=m[6321],uD2=m[6322],vD2=m[6323],dD2=m[6324],wD2=m[6325],yD2=m[6326],PD2=m[6327],pD2=m[6328],gD2=m[6329],zD2=m[6330],mD2=m[6331],xD2=m[6332],ED2=m[6333],ID2=m[6334],RD2=m[6335],FD2=m[6336],jD2=m[6337],CD2=m[6338],kD2=m[6339],LD2=m[6340],TD2=m[6341],SD2=m[6342],MD2=m[6343],BD2=m[6344],qD2=m[6345],AD2=m[6346],GD2=m[6347],OD2=m[6348],ND2=m[6349],DD2=m[6350],$D2=m[6351],QD2=m[6352],UD2=m[6353],YD2=m[6354],HD2=m[6355],KD2=m[6356],WD2=m[6357],JD2=m[6358],VD2=m[6359],ZD2=m[6360],XD2=m[6361],b$2=m[6362],l$2=m[6363],c$2=m[6364],i$2=m[6365],e$2=m[6366],o$2=m[6367],a$2=m[6368],n$2=m[6369],h$2=m[6370],f$2=m[6371],t$2=m[6372],r$2=m[6373],s$2=m[6374],_$2=m[6375],u$2=m[6376],v$2=m[6377],d$2=m[6378],w$2=m[6379],y$2=m[6380],P$2=m[6381],p$2=m[6382],g$2=m[6383],z$2=m[6384],m$2=m[6385],x$2=m[6386],E$2=m[6387],I$2=m[6388],R$2=m[6389],F$2=m[6390],j$2=m[6391],C$2=m[6392],k$2=m[6393],L$2=m[6394],T$2=m[6395],S$2=m[6396],M$2=m[6397],B$2=m[6398],q$2=m[6399],A$2=m[6400],G$2=m[6401],O$2=m[6402],N$
2=m[6403],D$2=m[6404],$$2=m[6405],Q$2=m[6406],U$2=m[6407],Y$2=m[6408],H$2=m[6409],K$2=m[6410],W$2=m[6411],J$2=m[6412],V$2=m[6413],Z$2=m[6414],X$2=m[6415],bQ2=m[6416],lQ2=m[6417],cQ2=m[6418],iQ2=m[6419],eQ2=m[6420],oQ2=m[6421],aQ2=m[6422],nQ2=m[6423],hQ2=m[6424],fQ2=m[6425],tQ2=m[6426],rQ2=m[6427],sQ2=m[6428],_Q2=m[6429],uQ2=m[6430],vQ2=m[6431],dQ2=m[6432],wQ2=m[6433],yQ2=m[6434],PQ2=m[6435],pQ2=m[6436],gQ2=m[6437],zQ2=m[6438],mQ2=m[6439],xQ2=m[6440],EQ2=m[6441],IQ2=m[6442],RQ2=m[6443],FQ2=m[6444],jQ2=m[6445],CQ2=m[6446],kQ2=m[6447],LQ2=m[6448],TQ2=m[6449],SQ2=m[6450],MQ2=m[6451],BQ2=m[6452],qQ2=m[6453],AQ2=m[6454],GQ2=m[6455],OQ2=m[6456],NQ2=m[6457],DQ2=m[6458],$Q2=m[6459],QQ2=m[6460],UQ2=m[6461],YQ2=m[6462],HQ2=m[6463],KQ2=m[6464],WQ2=m[6465],JQ2=m[6466],VQ2=m[6467],ZQ2=m[6468],XQ2=m[6469],bU2=m[6470],lU2=m[6471],cU2=m[6472],iU2=m[6473],eU2=m[6474],oU2=m[6475],aU2=m[6476],nU2=m[6477],hU2=m[6478],fU2=m[6479],tU2=m[6480],rU2=m[6481],sU2=m[6482],_U2=m[6483],uU2=m[6484],vU2=m[6485],dU2=m[6486],wU2=m[6487],yU2=m[6488],PU2=m[6489],pU2=m[6490],gU2=m[6491],zU2=m[6492],mU2=m[6493],xU2=m[6494],EU2=m[6495],IU2=m[6496],RU2=m[6497],FU2=m[6498],jU2=m[6499],CU2=m[6500],kU2=m[6501],LU2=m[6502],TU2=m[6503],SU2=m[6504],MU2=m[6505],BU2=m[6506],qU2=m[6507],AU2=m[6508],GU2=m[6509],OU2=m[6510],NU2=m[6511],DU2=m[6512],$U2=m[6513],QU2=m[6514],UU2=m[6515],YU2=m[6516],HU2=m[6517],KU2=m[6518],WU2=m[6519],JU2=m[6520],VU2=m[6521],ZU2=m[6522],XU2=m[6523],bY2=m[6524],lY2=m[6525],cY2=m[6526],iY2=m[6527],eY2=m[6528],oY2=m[6529],aY2=m[6530],nY2=m[6531],hY2=m[6532],fY2=m[6533],tY2=m[6534],rY2=m[6535],sY2=m[6536],_Y2=m[6537],uY2=m[6538],vY2=m[6539],dY2=m[6540],wY2=m[6541],yY2=m[6542],PY2=m[6543],pY2=m[6544],gY2=m[6545],zY2=m[6546],mY2=m[6547],xY2=m[6548],EY2=m[6549],IY2=m[6550],RY2=m[6551],FY2=m[6552],jY2=m[6553],CY2=m[6554],kY2=m[6555],LY2=m[6556],TY2=m[6557],SY2=m[6558],MY2=m[6559],BY2=m[6560],qY2=m[6561],AY2=m[6562],GY2=m[6563],OY2=m[6564],NY2=m[6565],DY2=m[6566],$Y2=m[6567],QY2=m[6568],UY2=m[6569
],YY2=m[6570],HY2=m[6571],KY2=m[6572],WY2=m[6573],JY2=m[6574],VY2=m[6575],ZY2=m[6576],XY2=m[6577],bH2=m[6578],lH2=m[6579],cH2=m[6580],iH2=m[6581],eH2=m[6582],oH2=m[6583],aH2=m[6584],nH2=m[6585],hH2=m[6586],fH2=m[6587],tH2=m[6588],rH2=m[6589],sH2=m[6590],_H2=m[6591],uH2=m[6592],vH2=m[6593],dH2=m[6594],wH2=m[6595],yH2=m[6596],PH2=m[6597],pH2=m[6598],gH2=m[6599],zH2=m[6600],mH2=m[6601],xH2=m[6602],EH2=m[6603],IH2=m[6604],RH2=m[6605],FH2=m[6606],jH2=m[6607],CH2=m[6608],kH2=m[6609],LH2=m[6610],TH2=m[6611],SH2=m[6612],MH2=m[6613],BH2=m[6614],qH2=m[6615],AH2=m[6616],GH2=m[6617],OH2=m[6618],NH2=m[6619],DH2=m[6620],$H2=m[6621],QH2=m[6622],UH2=m[6623],YH2=m[6624],HH2=m[6625],KH2=m[6626],WH2=m[6627],JH2=m[6628],VH2=m[6629],ZH2=m[6630],XH2=m[6631],bK2=m[6632],lK2=m[6633],cK2=m[6634],iK2=m[6635],eK2=m[6636],oK2=m[6637],aK2=m[6638],nK2=m[6639],hK2=m[6640],fK2=m[6641],tK2=m[6642],rK2=m[6643],sK2=m[6644],_K2=m[6645],uK2=m[6646],vK2=m[6647],dK2=m[6648],wK2=m[6649],yK2=m[6650],PK2=m[6651],pK2=m[6652],gK2=m[6653],zK2=m[6654],mK2=m[6655],xK2=m[6656],EK2=m[6657],IK2=m[6658],RK2=m[6659],FK2=m[6660],jK2=m[6661],CK2=m[6662],kK2=m[6663],LK2=m[6664],TK2=m[6665],SK2=m[6666],MK2=m[6667],BK2=m[6669],qK2=m[6670],AK2=m[6671],GK2=m[6672],OK2=m[6673],NK2=m[6674],DK2=m[6675],$K2=m[6676],QK2=m[6677],UK2=m[6678],YK2=m[6679],HK2=m[6680],KK2=m[6681],WK2=m[6682],JK2=m[6683],VK2=m[6684],ZK2=m[6685],XK2=m[6686],bW2=m[6687],lW2=m[6688],cW2=m[6689],iW2=m[6690],eW2=m[6691],oW2=m[6692],aW2=m[6693],nW2=m[6694],hW2=m[6695],fW2=m[6696],tW2=m[6697],rW2=m[6698],sW2=m[6699],_W2=m[6700],uW2=m[6701],vW2=m[6702],dW2=m[6703],wW2=m[6704],yW2=m[6705],PW2=m[6706],pW2=m[6707],gW2=m[6708],zW2=m[6709],mW2=m[6710],xW2=m[6711],EW2=m[6712],IW2=m[6713],RW2=m[6714],FW2=m[6715],jW2=m[6716],CW2=m[6717],kW2=m[6718],LW2=m[6719],TW2=m[6720],SW2=m[6721],MW2=m[6722],BW2=m[6723],qW2=m[6724],AW2=m[6725],GW2=m[6726],OW2=m[6727],NW2=m[6728],DW2=m[6729],$W2=m[6730],QW2=m[6731],UW2=m[6732],YW2=m[6733],HW2=m[6734],KW2=m[6735],WW2=m[6736],JW2=m[
6737],VW2=m[6738],ZW2=m[6739],XW2=m[6740],bJ2=m[6741],lJ2=m[6742],cJ2=m[6743],iJ2=m[6744],eJ2=m[6745],oJ2=m[6746],aJ2=m[6747],nJ2=m[6748],hJ2=m[6749],fJ2=m[6750],tJ2=m[6751],rJ2=m[6752],sJ2=m[6753],_J2=m[6754],uJ2=m[6755],vJ2=m[6756],dJ2=m[6757],wJ2=m[6758],yJ2=m[6759],PJ2=m[6760],pJ2=m[6761],gJ2=m[6762],zJ2=m[6763],mJ2=m[6764],xJ2=m[6765],EJ2=m[6766],IJ2=m[6767],RJ2=m[6768],FJ2=m[6769],jJ2=m[6770],CJ2=m[6771],kJ2=m[6772],LJ2=m[6773],TJ2=m[6774],SJ2=m[6775],MJ2=m[6776],BJ2=m[6777],qJ2=m[6778],AJ2=m[6779],GJ2=m[6780],OJ2=m[6781],NJ2=m[6782],DJ2=m[6783],$J2=m[6784],QJ2=m[6785],UJ2=m[6786],YJ2=m[6787],HJ2=m[6788],KJ2=m[6789],WJ2=m[6790],JJ2=m[6791],VJ2=m[6792],ZJ2=m[6793],XJ2=m[6794],bV2=m[6795],lV2=m[6796],cV2=m[6797],iV2=m[6798],eV2=m[6799],oV2=m[6800],aV2=m[6801],nV2=m[6802],hV2=m[6803],fV2=m[6804],tV2=m[6805],rV2=m[6806],sV2=m[6807],_V2=m[6808],uV2=m[6809],vV2=m[6810],dV2=m[6811],wV2=m[6812],yV2=m[6813],PV2=m[6814],pV2=m[6815],gV2=m[6816],zV2=m[6817],mV2=m[6818],xV2=m[6819],EV2=m[6820],IV2=m[6821],RV2=m[6822],FV2=m[6823],jV2=m[6824],CV2=m[6825],kV2=m[6826],LV2=m[6827],TV2=m[6828],SV2=m[6829],MV2=m[6830],BV2=m[6831],qV2=m[6832],AV2=m[6833],GV2=m[6834],OV2=m[6835],NV2=m[6836],DV2=m[6837],$V2=m[6838],QV2=m[6839],UV2=m[6840],YV2=m[6841],HV2=m[6842],KV2=m[6843],WV2=m[6844],JV2=m[6845],VV2=m[6846],ZV2=m[6847],XV2=m[6848],bZ2=m[6849],lZ2=m[6850],cZ2=m[6851],iZ2=m[6852],eZ2=m[6853],oZ2=m[6854],aZ2=m[6855],nZ2=m[6856],hZ2=m[6857],fZ2=m[6858],tZ2=m[6859],rZ2=m[6860],sZ2=m[6861],_Z2=m[6862],uZ2=m[6863],vZ2=m[6864],dZ2=m[6865],wZ2=m[6866],yZ2=m[6867],PZ2=m[6868],pZ2=m[6869],gZ2=m[6870],zZ2=m[6871],mZ2=m[6872],xZ2=m[6873],EZ2=m[6874],IZ2=m[6875],RZ2=m[6876],FZ2=m[6877],jZ2=m[6878],CZ2=m[6879],kZ2=m[6880],LZ2=m[6881],TZ2=m[6882],SZ2=m[6883],MZ2=m[6884],BZ2=m[6885],qZ2=m[6886],AZ2=m[6887],GZ2=m[6888],OZ2=m[6889],NZ2=m[6890],DZ2=m[6891],$Z2=m[6892],QZ2=m[6893],UZ2=m[6894],YZ2=m[6895],HZ2=m[6896],KZ2=m[6897],WZ2=m[6898],JZ2=m[6899],VZ2=m[6900],ZZ2=m[6901],XZ2=m[6902],bX2=m[6903],lX
2=m[6904],cX2=m[6905],iX2=m[6906],eX2=m[6907],oX2=m[6908],aX2=m[6909],nX2=m[6910],hX2=m[6911],fX2=m[6912],tX2=m[6913],rX2=m[6914],sX2=m[6915],_X2=m[6917],uX2=m[6918],vX2=m[6919],dX2=m[6920],wX2=m[6921],yX2=m[6922],PX2=m[6923],pX2=m[6924],gX2=m[6925],zX2=m[6926],mX2=m[6927],xX2=m[6928],EX2=m[6929],IX2=m[6930],RX2=m[6931],FX2=m[6932],jX2=m[6933],CX2=m[6934],kX2=m[6935],LX2=m[6936],TX2=m[6937],SX2=m[6938],MX2=m[6939],BX2=m[6940],qX2=m[6941],AX2=m[6942],GX2=m[6943],OX2=m[6944],NX2=m[6945],DX2=m[6946],$X2=m[6947],QX2=m[6948],UX2=m[6949],YX2=m[6950],HX2=m[6951],KX2=m[6952],WX2=m[6953],JX2=m[6954],VX2=m[6955],ZX2=m[6956],XX2=m[6957],b21=m[6958],l21=m[6959],c21=m[6960],i21=m[6961],e21=m[6962],o21=m[6963],a21=m[6964],n21=m[6965],h21=m[6966],f21=m[6967],t21=m[6968],r21=m[6969],s21=m[6970],_21=m[6971],u21=m[6972],v21=m[6973],d21=m[6974],w21=m[6975],y21=m[6976],P21=m[6977],p21=m[6978],g21=m[6979],z21=m[6980],m21=m[6981],x21=m[6982],E21=m[6983],I21=m[6984],R21=m[6985],F21=m[6986],j21=m[6987],C21=m[6988],k21=m[6989],L21=m[6990],T21=m[6991],S21=m[6992],M21=m[6993],B21=m[6994],q21=m[6995],A21=m[6996],G21=m[6997],O21=m[6998],N21=m[6999],D21=m[7e3],$21=m[7001],Q21=m[7003],U21=m[7004],Y21=m[7005],H21=m[7006],K21=m[7007],W21=m[7008],J21=m[7009],V21=m[7010],Z21=m[7011],X21=m[7012],b11=m[7013],l11=m[7014],c11=m[7015],i11=m[7016],e11=m[7017],o11=m[7018],a11=m[7019],n11=m[7020],h11=m[7021],f11=m[7022],t11=m[7023],r11=m[7024],s11=m[7025],_11=m[7026],u11=m[7027],v11=m[7028],d11=m[7029],w11=m[7030],y11=m[7031],P11=m[7032],p11=m[7033],g11=m[7034],z11=m[7035],m11=m[7036],x11=m[7037],E11=m[7038],I11=m[7039],R11=m[7040],F11=m[7041],j11=m[7042],C11=m[7043],k11=m[7044],L11=m[7045],T11=m[7046],S11=m[7047],M11=m[7048],B11=m[7049],q11=m[7050],A11=m[7051],G11=m[7052],O11=m[7053],N11=m[7054],D11=m[7055],$11=m[7056],Q11=m[7057],U11=m[7058],Y11=m[7059],H11=m[7060],K11=m[7061],W11=m[7062],J11=m[7063],V11=m[7064],Z11=m[7065],X11=m[7066],b51=m[7067],l51=m[7068],c51=m[7069],i51=m[7070],e51=m[7071],o51=m[7072]
,a51=m[7073],n51=m[7074],h51=m[7075],f51=m[7076],t51=m[7077],r51=m[7078],s51=m[7079],_51=m[7080],u51=m[7081],v51=m[7082],d51=m[7083],w51=m[7084],y51=m[7085],P51=m[7086],p51=m[7087],g51=m[7088],z51=m[7089],m51=m[7090],x51=m[7091],E51=m[7092],I51=m[7093],R51=m[7094],F51=m[7095],j51=m[7096],C51=m[7097],k51=m[7098],L51=m[7099],T51=m[7100],S51=m[7101],M51=m[7102],B51=m[7103],q51=m[7104],A51=m[7105],G51=m[7106],O51=m[7107],N51=m[7108],D51=m[7109],$51=m[7110],Q51=m[7111],U51=m[7112],Y51=m[7113],H51=m[7114],K51=m[7115],W51=m[7116],J51=m[7117],V51=m[7118],Z51=m[7119],X51=m[7120],b71=m[7121],l71=m[7122],c71=m[7123],i71=m[7124],e71=m[7125],o71=m[7126],a71=m[7127],n71=m[7128],h71=m[7129],f71=m[7130],t71=m[7131],r71=m[7132],s71=m[7133],_71=m[7134],u71=m[7135],v71=m[7136],d71=m[7137],w71=m[7138],y71=m[7139],P71=m[7140],p71=m[7141],g71=m[7142],z71=m[7143],m71=m[7144],x71=m[7145],E71=m[7146],I71=m[7147],R71=m[7148],F71=m[7149],j71=m[7150],C71=m[7151],k71=m[7152],L71=m[7153],T71=m[7154],S71=m[7155],M71=m[7156],B71=m[7157],q71=m[7158],A71=m[7159],G71=m[7160],O71=m[7161],N71=m[7162],D71=m[7163],$71=m[7164],Q71=m[7165],U71=m[7166],Y71=m[7167],H71=m[7168],K71=m[7169],W71=m[7170],J71=m[7171],V71=m[7172],Z71=m[7173],X71=m[7174],b31=m[7175],l31=m[7176],c31=m[7177],i31=m[7178],e31=m[7179],o31=m[7180],a31=m[7181],n31=m[7182],h31=m[7183],f31=m[7184],t31=m[7185],r31=m[7186],s31=m[7187],_31=m[7188],u31=m[7189],v31=m[7190],d31=m[7191],w31=m[7192],y31=m[7193],P31=m[7194],p31=m[7195],g31=m[7196],z31=m[7197],m31=m[7198],x31=m[7199],E31=m[7200],I31=m[7201],R31=m[7202],F31=m[7203],j31=m[7204],C31=m[7205],k31=m[7206],L31=m[7207],T31=m[7208],S31=m[7209],M31=m[7210],B31=m[7211],q31=m[7212],A31=m[7213],G31=m[7214],O31=m[7215],N31=m[7216],D31=m[7217],$31=m[7218],Q31=m[7219],U31=m[7220],Y31=m[7221],H31=m[7222],K31=m[7223],W31=m[7224],J31=m[7225],V31=m[7226],Z31=m[7227],X31=m[7228],b41=m[7229],l41=m[7230],c41=m[7231],i41=m[7232],e41=m[7233],o41=m[7234],a41=m[7235],n41=m[7236],h41=m[7237],f41=m[7238],t41=m[7
239],r41=m[7240],s41=m[7241],_41=m[7242],u41=m[7243],v41=m[7244],d41=m[7245],w41=m[7246],y41=m[7247],P41=m[7248],p41=m[7249],g41=m[7250],z41=m[7251],m41=m[7252],x41=m[7253],E41=m[7254],I41=m[7255],R41=m[7256],F41=m[7257],j41=m[7258],C41=m[7259],k41=m[7260],L41=m[7261],T41=m[7262],S41=m[7263],M41=m[7264],B41=m[7265],q41=m[7266],A41=m[7267],G41=m[7268],O41=m[7269],N41=m[7270],D41=m[7271],$41=m[7272],Q41=m[7273],U41=m[7274],Y41=m[7275],H41=m[7276],K41=m[7277],W41=m[7278],J41=m[7279],V41=m[7280],Z41=m[7281],X41=m[7282],b61=m[7283],l61=m[7284],c61=m[7285],i61=m[7286],e61=m[7287],o61=m[7288],a61=m[7289],n61=m[7290],h61=m[7291],f61=m[7292],t61=m[7293],r61=m[7294],s61=m[7295],_61=m[7296],u61=m[7298],v61=m[7299],d61=m[7300],w61=m[7301],y61=m[7302],P61=m[7303],p61=m[7304],g61=m[7305],z61=m[7306],m61=m[7307],x61=m[7308],E61=m[7309],I61=m[7310],R61=m[7311],F61=m[7312],j61=m[7313],C61=m[7314],k61=m[7315],L61=m[7316],T61=m[7317],S61=m[7318],M61=m[7319],B61=m[7320],q61=m[7321],A61=m[7322],G61=m[7323],O61=m[7324],N61=m[7325],D61=m[7326],$61=m[7327],Q61=m[7328],U61=m[7329],Y61=m[7330],H61=m[7331],K61=m[7332],W61=m[7333],J61=m[7334],V61=m[7335],Z61=m[7336],X61=m[7337],b01=m[7338],l01=m[7339],c01=m[7340],i01=m[7341],e01=m[7342],o01=m[7343],a01=m[7344],n01=m[7345],h01=m[7346],f01=m[7347],t01=m[7348],r01=m[7349],s01=m[7350],_01=m[7351],u01=m[7352],v01=m[7353],d01=m[7354],w01=m[7355],y01=m[7356],P01=m[7357],p01=m[7358],g01=m[7359],z01=m[7360],m01=m[7361],x01=m[7362],E01=m[7363],I01=m[7364],R01=m[7365],F01=m[7366],j01=m[7367],C01=m[7368],k01=m[7369],L01=m[7370],T01=m[7371],S01=m[7372],M01=m[7373],B01=m[7374],q01=m[7375],A01=m[7376],G01=m[7377],O01=m[7378],N01=m[7379],D01=m[7380],$01=m[7381],Q01=m[7382],U01=m[7383],Y01=m[7384],H01=m[7385],K01=m[7386],W01=m[7387],J01=m[7388],V01=m[7389],Z01=m[7390],X01=m[7391],b81=m[7392],l81=m[7393],c81=m[7394],i81=m[7395],e81=m[7396],o81=m[7397],a81=m[7398],n81=m[7399],h81=m[7400],f81=m[7401],t81=m[7402],r81=m[7403],s81=m[7404],_81=m[7405],u81=m[7406],v81
=m[7407],d81=m[7408],w81=m[7409],y81=m[7410],P81=m[7411],p81=m[7412],g81=m[7413],z81=m[7414],m81=m[7415],x81=m[7416],E81=m[7417],I81=m[7418],R81=m[7419],F81=m[7420],j81=m[7421],C81=m[7422],k81=m[7423],L81=m[7424],T81=m[7425],S81=m[7426],M81=m[7427],B81=m[7428],q81=m[7429],A81=m[7430],G81=m[7431],O81=m[7432],N81=m[7433],D81=m[7434],$81=m[7435],Q81=m[7436],U81=m[7437],Y81=m[7438],H81=m[7439],K81=m[7440],W81=m[7441],J81=m[7442],V81=m[7443],Z81=m[7444],X81=m[7445],b91=m[7446],l91=m[7447],c91=m[7448],i91=m[7449],e91=m[7450],o91=m[7451],a91=m[7452],n91=m[7453],h91=m[7454],f91=m[7455],t91=m[7456],r91=m[7457],s91=m[7458],_91=m[7459],u91=m[7461],v91=m[7462],d91=m[7463],w91=m[7464],y91=m[7465],P91=m[7466],p91=m[7467],g91=m[7468],z91=m[7469],m91=m[7470],x91=m[7471],E91=m[7472],I91=m[7473],R91=m[7474],F91=m[7475],j91=m[7476],C91=m[7477],k91=m[7478],L91=m[7479],T91=m[7480],S91=m[7481],M91=m[7482],B91=m[7483],q91=m[7484],A91=m[7485],G91=m[7486],O91=m[7487],N91=m[7488],D91=m[7489],$91=m[7490],Q91=m[7491],U91=m[7492],Y91=m[7493],H91=m[7494],K91=m[7495],W91=m[7496],J91=m[7497],V91=m[7498],Z91=m[7499],X91=m[7500],bb1=m[7501],lb1=m[7502],cb1=m[7503],ib1=m[7504],eb1=m[7505],ob1=m[7506],ab1=m[7507],nb1=m[7508],hb1=m[7509],fb1=m[7510],tb1=m[7511],rb1=m[7512],sb1=m[7513],_b1=m[7514],ub1=m[7515],vb1=m[7516],db1=m[7517],wb1=m[7518],yb1=m[7519],Pb1=m[7520],pb1=m[7521],gb1=m[7522],zb1=m[7523],mb1=m[7524],xb1=m[7525],Eb1=m[7526],Ib1=m[7527],Rb1=m[7528],Fb1=m[7529],jb1=m[7530],Cb1=m[7531],kb1=m[7532],Lb1=m[7533],Tb1=m[7534],Sb1=m[7535],Mb1=m[7536],Bb1=m[7537],qb1=m[7538],Ab1=m[7539],Gb1=m[7540],Ob1=m[7541],Nb1=m[7542],Db1=m[7543],$b1=m[7544],Qb1=m[7545],Ub1=m[7546],Yb1=m[7547],Hb1=m[7548],Kb1=m[7549],Wb1=m[7550],Jb1=m[7551],Vb1=m[7552],Zb1=m[7553],Xb1=m[7554],bl1=m[7555],ll1=m[7556],cl1=m[7557],il1=m[7558],el1=m[7559],ol1=m[7560],al1=m[7561],nl1=m[7562],hl1=m[7563],fl1=m[7564],tl1=m[7565],rl1=m[7566],sl1=m[7567],_l1=m[7568],ul1=m[7569],vl1=m[7570],dl1=m[7571],wl1=m[7572],yl1=m[7573],Pl1=m[7574]
,pl1=m[7575],gl1=m[7576],zl1=m[7577],ml1=m[7578],xl1=m[7579],El1=m[7580],Il1=m[7581],Rl1=m[7582],Fl1=m[7583],jl1=m[7584],Cl1=m[7585],kl1=m[7586],Ll1=m[7587],Tl1=m[7588],Sl1=m[7589],Ml1=m[7590],Bl1=m[7591],ql1=m[7592],Al1=m[7593],Gl1=m[7594],Ol1=m[7595],Nl1=m[7596],Dl1=m[7597],$l1=m[7598],Ql1=m[7599],Ul1=m[7600],Yl1=m[7601],Hl1=m[7602],Kl1=m[7603],Wl1=m[7604],Jl1=m[7605],Vl1=m[7606],Zl1=m[7607],Xl1=m[7608],bc1=m[7609],lc1=m[7610],cc1=m[7611],ic1=m[7612],ec1=m[7613],oc1=m[7614],ac1=m[7615],nc1=m[7616],hc1=m[7617],fc1=m[7618],tc1=m[7619],rc1=m[7620],sc1=m[7621],_c1=m[7622],uc1=m[7623],vc1=m[7624],dc1=m[7625],wc1=m[7626],yc1=m[7627],Pc1=m[7628],pc1=m[7629],gc1=m[7630],zc1=m[7631],mc1=m[7632],xc1=m[7633],Ec1=m[7634],Ic1=m[7635],Rc1=m[7636],Fc1=m[7637],jc1=m[7638],Cc1=m[7639],kc1=m[7640],Lc1=m[7641],Tc1=m[7642],Sc1=m[7643],Mc1=m[7644],Bc1=m[7645],qc1=m[7646],Ac1=m[7647],Gc1=m[7648],Oc1=m[7649],Nc1=m[7650],Dc1=m[7651],$c1=m[7652],Qc1=m[7653],Uc1=m[7654],Yc1=m[7655],Hc1=m[7656],Kc1=m[7657],Wc1=m[7658],Jc1=m[7659],Vc1=m[7660],Zc1=m[7661],Xc1=m[7662],bi1=m[7663],li1=m[7664],ci1=m[7665],ii1=m[7666],ei1=m[7667],oi1=m[7668],ai1=m[7669],ni1=m[7670],hi1=m[7671],fi1=m[7672],ti1=m[7673],ri1=m[7674],si1=m[7675],_i1=m[7676],ui1=m[7677],vi1=m[7678],di1=m[7679],wi1=m[7680],yi1=m[7681],Pi1=m[7682],pi1=m[7683],gi1=m[7684],zi1=m[7685],mi1=m[7686],xi1=m[7687],Ei1=m[7688],Ii1=m[7689],Ri1=m[7690],Fi1=m[7691],ji1=m[7692],Ci1=m[7693],ki1=m[7694],Li1=m[7695],Ti1=m[7696],Si1=m[7697],Mi1=m[7698],Bi1=m[7699],qi1=m[7700],Ai1=m[7701],Gi1=m[7702],Oi1=m[7703],Ni1=m[7704],Di1=m[7705],$i1=m[7706],Qi1=m[7707],Ui1=m[7708],Yi1=m[7709],Hi1=m[7710],Ki1=m[7711],Wi1=m[7712],Ji1=m[7713],Vi1=m[7714],Zi1=m[7715],Xi1=m[7716],be1=m[7717],le1=m[7718],ce1=m[7719],ie1=m[7720],ee1=m[7721],oe1=m[7722],ae1=m[7723],ne1=m[7724],he1=m[7725],fe1=m[7726],te1=m[7727],re1=m[7728],se1=m[7729],_e1=m[7730],ue1=m[7731],ve1=m[7732],de1=m[7733],we1=m[7734],ye1=m[7735],Pe1=m[7736],pe1=m[7737],ge1=m[7738],ze1=m[7739],me1=m[7740],xe1=m[7
741],Ee1=m[7742],Ie1=m[7743],Re1=m[7744],Fe1=m[7745],je1=m[7746],Ce1=m[7747],ke1=m[7748],Le1=m[7749],Te1=m[7750],Se1=m[7751],Me1=m[7752],Be1=m[7753],qe1=m[7754],Ae1=m[7755],Ge1=m[7756],Oe1=m[7757],Ne1=m[7758],De1=m[7759],$e1=m[7760],Qe1=m[7761],Ue1=m[7762],Ye1=m[7763],He1=m[7764],Ke1=m[7765],We1=m[7766],Je1=m[7767],Ve1=m[7768],Ze1=m[7769],Xe1=m[7770],bo1=m[7771],lo1=m[7772],co1=m[7773],io1=m[7774],eo1=m[7775],oo1=m[7776],ao1=m[7777],no1=m[7778],ho1=m[7779],fo1=m[7780],to1=m[7781],ro1=m[7782],so1=m[7783],_o1=m[7784],uo1=m[7785],vo1=m[7786],do1=m[7787],wo1=m[7788],yo1=m[7789],Po1=m[7790],po1=m[7791],go1=m[7792],zo1=m[7793],mo1=m[7794],xo1=m[7795],Eo1=m[7796],Io1=m[7797],Ro1=m[7798],Fo1=m[7799],jo1=m[7800],Co1=m[7801],ko1=m[7802],Lo1=m[7803],To1=m[7804],So1=m[7805],Mo1=m[7806],Bo1=m[7807],qo1=m[7808],Ao1=m[7809],Go1=m[7810],Oo1=m[7811],No1=m[7812],Do1=m[7813],$o1=m[7814],Qo1=m[7815],Uo1=m[7816],Yo1=m[7817],Ho1=m[7818],Ko1=m[7819],Wo1=m[7820],Jo1=m[7821],Vo1=m[7822],Zo1=m[7823],Xo1=m[7824],ba1=m[7825],la1=m[7826],ca1=m[7827],ia1=m[7828],ea1=m[7829],oa1=m[7830],aa1=m[7831],na1=m[7832],ha1=m[7833],fa1=m[7834],ta1=m[7835],ra1=m[7836],sa1=m[7837],_a1=m[7838],ua1=m[7839],va1=m[7840],da1=m[7841],wa1=m[7842],ya1=m[7843],Pa1=m[7844],pa1=m[7845],ga1=m[7846],za1=m[7847],ma1=m[7848],xa1=m[7849],Ea1=m[7850],Ia1=m[7851],Ra1=m[7852],Fa1=m[7853],ja1=m[7854],Ca1=m[7855],ka1=m[7856],La1=m[7857],Ta1=m[7858],Sa1=m[7859],Ma1=m[7860],Ba1=m[7861],qa1=m[7862],Aa1=m[7863],Ga1=m[7864],Oa1=m[7865],Na1=m[7866],Da1=m[7867],$a1=m[7868],Qa1=m[7869],Ua1=m[7870],Ya1=m[7871],Ha1=m[7872],Ka1=m[7873],Wa1=m[7874],Ja1=m[7875],Va1=m[7876],Za1=m[7877],Xa1=m[7878],bn1=m[7879],ln1=m[7880],cn1=m[7881],in1=m[7882],en1=m[7883],on1=m[7884],an1=m[7885],nn1=m[7886],hn1=m[7887],fn1=m[7888],tn1=m[7889],rn1=m[7890],sn1=m[7891],_n1=m[7892],un1=m[7893],vn1=m[7894],dn1=m[7895],wn1=m[7896],yn1=m[7897],Pn1=m[7898],pn1=m[7899],gn1=m[7900],zn1=m[7901],mn1=m[7902],xn1=m[7903],En1=m[7904],In1=m[7905],Rn1=m[7906],Fn1=m[7907],jn1
=m[7908],Cn1=m[7909],kn1=m[7910],Ln1=m[7911],Tn1=m[7912],Sn1=m[7913],Mn1=m[7914],Bn1=m[7915],qn1=m[7916],An1=m[7917],Gn1=m[7918],On1=m[7919],Nn1=m[7920],Dn1=m[7921],$n1=m[7922],Qn1=m[7923],Un1=m[7924],Yn1=m[7925],Hn1=m[7926],Kn1=m[7927],Wn1=m[7928],Jn1=m[7929],Vn1=m[7930],Zn1=m[7931],Xn1=m[7932],bh1=m[7933],lh1=m[7934],ch1=m[7935],ih1=m[7936],eh1=m[7937],oh1=m[7938],ah1=m[7939],nh1=m[7940],hh1=m[7941],fh1=m[7942],th1=m[7943],rh1=m[7944],sh1=m[7945],_h1=m[7946],uh1=m[7947],vh1=m[7948],dh1=m[7949],wh1=m[7950],yh1=m[7951],Ph1=m[7952],ph1=m[7953],gh1=m[7954],zh1=m[7955],mh1=m[7956],xh1=m[7957],Eh1=m[7958],Ih1=m[7959],Rh1=m[7960],Fh1=m[7961],jh1=m[7962],Ch1=m[7963],kh1=m[7964],Lh1=m[7965],Th1=m[7966],Sh1=m[7967],Mh1=m[7968],Bh1=m[7969],qh1=m[7970],Ah1=m[7971],Gh1=m[7972],Oh1=m[7973],Nh1=m[7974],Dh1=m[7975],$h1=m[7976],Qh1=m[7977],Uh1=m[7978],Yh1=m[7979],Hh1=m[7980],Kh1=m[7981],Wh1=m[7982],Jh1=m[7983],Vh1=m[7984],Zh1=m[7985],Xh1=m[7986],bf1=m[7987],lf1=m[7988],cf1=m[7989],if1=m[7990],ef1=m[7991],of1=m[7992],af1=m[7993],nf1=m[7994],hf1=m[7995],ff1=m[7996],tf1=m[7997],rf1=m[7998],sf1=m[7999],_f1=m[8e3],uf1=m[8001],vf1=m[8002],df1=m[8003],wf1=m[8004],yf1=m[8005],Pf1=m[8006],pf1=m[8007],gf1=m[8009],zf1=m[8010],mf1=m[8011],xf1=m[8012],Ef1=m[8013],If1=m[8014],Rf1=m[8015],Ff1=m[8016],jf1=m[8017],Cf1=m[8018],kf1=m[8019],Lf1=m[8020],Tf1=m[8021],Sf1=m[8022],Mf1=m[8023],Bf1=m[8024],qf1=m[8025],Af1=m[8026],Gf1=m[8027],Of1=m[8028],Nf1=m[8029],Df1=m[8030],$f1=m[8031],Qf1=m[8032],Uf1=m[8033],Yf1=m[8034],Hf1=m[8035],Kf1=m[8036],Wf1=m[8037],Jf1=m[8038],Vf1=m[8039],Zf1=m[8040],Xf1=m[8041],bt1=m[8042],lt1=m[8043],ct1=m[8044],it1=m[8045],et1=m[8046],ot1=m[8047],at1=m[8048],nt1=m[8049],ht1=m[8050],ft1=m[8051],tt1=m[8052],rt1=m[8053],st1=m[8054],_t1=m[8055],ut1=m[8056],vt1=m[8057],dt1=m[8058],wt1=m[8059],yt1=m[8060],Pt1=m[8061],pt1=m[8062],gt1=m[8063],zt1=m[8064],mt1=m[8065],xt1=m[8066],Et1=m[8067],It1=m[8068],Rt1=m[8069],Ft1=m[8070],jt1=m[8071],Ct1=m[8072],kt1=m[8073],Lt1=m[8074],Tt1=m[8075],
St1=m[8076],Mt1=m[8077],Bt1=m[8078],qt1=m[8079],At1=m[8080],Gt1=m[8081],Ot1=m[8082],Nt1=m[8083],Dt1=m[8084],$t1=m[8085],Qt1=m[8086],Ut1=m[8088],Yt1=m[8089],Ht1=m[8090],Kt1=m[8091],Wt1=m[8092],Jt1=m[8093],Vt1=m[8094],Zt1=m[8095],Xt1=m[8096],br1=m[8097],lr1=m[8098],cr1=m[8099],ir1=m[8100],er1=m[8101],or1=m[8102],ar1=m[8103],nr1=m[8104],hr1=m[8105],fr1=m[8106],tr1=m[8107],rr1=m[8108],sr1=m[8109],_r1=m[8110],ur1=m[8111],vr1=m[8112],dr1=m[8113],wr1=m[8114],yr1=m[8115],Pr1=m[8116],pr1=m[8117],gr1=m[8118],zr1=m[8119],mr1=m[8120],xr1=m[8121],Er1=m[8122],Ir1=m[8123],Rr1=m[8124],Fr1=m[8125],jr1=m[8126],Cr1=m[8127],kr1=m[8128],Lr1=m[8129],Tr1=m[8130],Sr1=m[8131],Mr1=m[8132],Br1=m[8133],qr1=m[8134],Ar1=m[8135],Gr1=m[8136],Or1=m[8137],Nr1=m[8139],Dr1=m[8140],$r1=m[8141],Qr1=m[8142],Ur1=m[8143],Yr1=m[8144],Hr1=m[8145],Kr1=m[8146],Wr1=m[8147],Jr1=m[8148],Vr1=m[8149],Zr1=m[8150],Xr1=m[8151],bs1=m[8152],ls1=m[8153],cs1=m[8154],is1=m[8155],es1=m[8156],os1=m[8157],as1=m[8158],ns1=m[8159],hs1=m[8160],fs1=m[8161],ts1=m[8162],rs1=m[8163],ss1=m[8164],_s1=m[8165],us1=m[8166],vs1=m[8167],ds1=m[8168],ws1=m[8169],ys1=m[8170],Ps1=m[8171],ps1=m[8172],gs1=m[8173],zs1=m[8174],ms1=m[8175],xs1=m[8176],Es1=m[8177],Is1=m[8178],Rs1=m[8179],Fs1=m[8180],js1=m[8181],Cs1=m[8182],ks1=m[8183],Ls1=m[8184],Ts1=m[8185],Ss1=m[8186],Ms1=m[8187],Bs1=m[8188],qs1=m[8189],As1=m[8190],Gs1=m[8191],Os1=m[8192],Ns1=m[8193],Ds1=m[8194],$s1=m[8195],Qs1=m[8196],Us1=m[8197],Ys1=m[8198],Hs1=m[8199],Ks1=m[8200],Ws1=m[8201],Js1=m[8202],Vs1=m[8203],Zs1=m[8204],Xs1=m[8205],b_1=m[8206],l_1=m[8207],c_1=m[8208],i_1=m[8209],e_1=m[8210],o_1=m[8211],a_1=m[8212],n_1=m[8213],h_1=m[8214],f_1=m[8215],t_1=m[8216],r_1=m[8217],s_1=m[8218],__1=m[8219],u_1=m[8220],v_1=m[8221],d_1=m[8222],w_1=m[8223],y_1=m[8224],P_1=m[8225],p_1=m[8226],g_1=m[8227],z_1=m[8228],m_1=m[8229],x_1=m[8230],E_1=m[8231],I_1=m[8232],R_1=m[8233],F_1=m[8234],j_1=m[8235],C_1=m[8236],k_1=m[8237],L_1=m[8238],T_1=m[8239],S_1=m[8240],M_1=m[8241],B_1=m[8242],q_1=m[8243],A_1=m[82
44],G_1=m[8245],O_1=m[8246],N_1=m[8247],D_1=m[8248],$_1=m[8249],Q_1=m[8250],U_1=m[8251],Y_1=m[8252],H_1=m[8253],K_1=m[8254],W_1=m[8255],J_1=m[8256],V_1=m[8257],Z_1=m[8258],X_1=m[8259],bu1=m[8260],lu1=m[8261],cu1=m[8262],iu1=m[8263],eu1=m[8264],ou1=m[8265],au1=m[8266],nu1=m[8267],hu1=m[8268],fu1=m[8269],tu1=m[8270],ru1=m[8271],su1=m[8272],_u1=m[8273],uu1=m[8274],vu1=m[8275],du1=m[8276],wu1=m[8277],yu1=m[8278],Pu1=m[8279],pu1=m[8280],gu1=m[8281],zu1=m[8282],mu1=m[8283],xu1=m[8284],Eu1=m[8285],Iu1=m[8286],Ru1=m[8287],Fu1=m[8288],ju1=m[8289],Cu1=m[8290],ku1=m[8291],Lu1=m[8292],Tu1=m[8293],Su1=m[8294],Mu1=m[8295],Bu1=m[8296],qu1=m[8297],Au1=m[8298],Gu1=m[8299],Ou1=m[8300],Nu1=m[8301],Du1=m[8302],$u1=m[8303],Qu1=m[8304],Uu1=m[8305],Yu1=m[8306],Hu1=m[8307],Ku1=m[8308],Wu1=m[8309],Ju1=m[8310],Vu1=m[8311],Zu1=m[8312],Xu1=m[8313],bv1=m[8314],lv1=m[8315],cv1=m[8316],iv1=m[8317],ev1=m[8318],ov1=m[8319],av1=m[8320],nv1=m[8321],hv1=m[8322],fv1=m[8323],tv1=m[8324],rv1=m[8325],sv1=m[8326],_v1=m[8327],uv1=m[8328],vv1=m[8329],dv1=m[8330],wv1=m[8331],yv1=m[8332],Pv1=m[8333],pv1=m[8334],gv1=m[8335],zv1=m[8336],mv1=m[8337],xv1=m[8338],Ev1=m[8339],Iv1=m[8340],Rv1=m[8341],Fv1=m[8342],jv1=m[8343],Cv1=m[8344],kv1=m[8345],Lv1=m[8346],Tv1=m[8347],Sv1=m[8348],Mv1=m[8349],Bv1=m[8350],qv1=m[8351],Av1=m[8352],Gv1=m[8353],Ov1=m[8354],Nv1=m[8355],Dv1=m[8356],$v1=m[8357],Qv1=m[8358],Uv1=m[8359],Yv1=m[8360],Hv1=m[8361],Kv1=m[8362],Wv1=m[8363],Jv1=m[8364],Vv1=m[8365],Zv1=m[8366],Xv1=m[8367],bd1=m[8368],ld1=m[8369],cd1=m[8370],id1=m[8371],ed1=m[8372],od1=m[8373],ad1=m[8374],nd1=m[8375],hd1=m[8376],fd1=m[8377],td1=m[8378],rd1=m[8379],sd1=m[8380],_d1=m[8381],ud1=m[8382],vd1=m[8383],dd1=m[8384],wd1=m[8385],yd1=m[8386],Pd1=m[8387],pd1=m[8388],gd1=m[8389],zd1=m[8390],md1=m[8391],xd1=m[8392],Ed1=m[8393],Id1=m[8394],Rd1=m[8395],Fd1=m[8396],jd1=m[8397],Cd1=m[8398],kd1=m[8399],Ld1=m[8400],Td1=m[8401],Sd1=m[8402],Md1=m[8403],Bd1=m[8404],qd1=m[8405],Ad1=m[8406],Gd1=m[8407],Od1=m[8408],Nd1=m[8409],Dd1=m[8410],$d1=
m[8411],Qd1=m[8412],Ud1=m[8413],Yd1=m[8414],Hd1=m[8415],Kd1=m[8416],Wd1=m[8417],Jd1=m[8418],Vd1=m[8419],Zd1=m[8420],Xd1=m[8421],bw1=m[8422],lw1=m[8423],cw1=m[8424],iw1=m[8425],ew1=m[8426],ow1=m[8427],aw1=m[8428],nw1=m[8429],hw1=m[8430],fw1=m[8431],tw1=m[8432],rw1=m[8433],sw1=m[8434],_w1=m[8435],uw1=m[8436],vw1=m[8437],dw1=m[8438],ww1=m[8439],yw1=m[8440],Pw1=m[8441],pw1=m[8442],gw1=m[8443],zw1=m[8444],mw1=m[8445],xw1=m[8446],Ew1=m[8447],Iw1=m[8448],Rw1=m[8449],Fw1=m[8450],jw1=m[8451],Cw1=m[8452],kw1=m[8453],Lw1=m[8454],Tw1=m[8455],Sw1=m[8456],Mw1=m[8457],Bw1=m[8458],qw1=m[8459],Aw1=m[8460],Gw1=m[8461],Ow1=m[8462],Nw1=m[8463],Dw1=m[8464],$w1=m[8465],Qw1=m[8466],Uw1=m[8467],Yw1=m[8468],Hw1=m[8469],Kw1=m[8470],Ww1=m[8471],Jw1=m[8472],Vw1=m[8473],Zw1=m[8474],Xw1=m[8475],by1=m[8476],ly1=m[8477],cy1=m[8478],iy1=m[8479],ey1=m[8480],oy1=m[8481],ay1=m[8482],ny1=m[8483],hy1=m[8484],fy1=m[8485],ty1=m[8486],ry1=m[8487],sy1=m[8488],_y1=m[8489],uy1=m[8490],vy1=m[8491],dy1=m[8492],wy1=m[8493],yy1=m[8494],Py1=m[8495],py1=m[8496],gy1=m[8497],zy1=m[8498],my1=m[8499],xy1=m[8500],Ey1=m[8501],Iy1=m[8502],Ry1=m[8503],Fy1=m[8504],jy1=m[8505],Cy1=m[8506],ky1=m[8507],Ly1=m[8508],Ty1=m[8509],Sy1=m[8510],My1=m[8511],By1=m[8512],qy1=m[8513],Ay1=m[8514],Gy1=m[8515],Oy1=m[8516],Ny1=m[8517],Dy1=m[8518],$y1=m[8519],Qy1=m[8520],Uy1=m[8521],Yy1=m[8522],Hy1=m[8523],Ky1=m[8524],Wy1=m[8525],Jy1=m[8526],Vy1=m[8527],Zy1=m[8528],Xy1=m[8529],bP1=m[8530],lP1=m[8531],cP1=m[8532],iP1=m[8533],eP1=m[8534],oP1=m[8535],aP1=m[8536],nP1=m[8537],hP1=m[8538],fP1=m[8539],tP1=m[8540],rP1=m[8541],sP1=m[8542],_P1=m[8543],uP1=m[8544],vP1=m[8545],dP1=m[8546],wP1=m[8547],yP1=m[8548],PP1=m[8549],pP1=m[8550],gP1=m[8551],zP1=m[8552],mP1=m[8553],xP1=m[8554],EP1=m[8555],IP1=m[8556],RP1=m[8557],FP1=m[8558],jP1=m[8559],CP1=m[8560],kP1=m[8561],LP1=m[8562],TP1=m[8563],SP1=m[8564],MP1=m[8565],BP1=m[8566],qP1=m[8567],AP1=m[8568],GP1=m[8569],OP1=m[8570],NP1=m[8571],DP1=m[8572],$P1=m[8573],QP1=m[8574],UP1=m[8575],YP1=m[8576],HP1=m[8577],
KP1=m[8578];Cbb([],[]);for(;;){var M=e[i++];if(-1==[null,99954,99952,99876,99827,99781,99768,99730,99693,99621,99518,99426,99383,99379,99362,99322,99278,99248,99221,99158,99151,99128,98991,98945,98870,98809,98643,98641,98635,98535,98462,98359,98351,98239,98228,98092,98029,98018,98009,97999,97935,97856,97837,97783,97686,97596,97453,97437,97413,97374,97266,97254,97160,97127,97059,96954,96883,96776,96762,96735,96659,96657,96638,96628,96274,96225,96216,96172,96169,96110,96088,96085,96080,96043,95989,95979,95880,95777,95770,95683,95645,95596,95583,95559,95311,95286,95227,95190,95188,95167,95133,95012,94956,94867,94863,94831,94782,94768,94604,94555,94450,94428,94392,94313,94285,94244,94230,94160,94132,93963,93875,93777,93754,93730,93691,93637,93555,93505,93438,93429,93307,93283,93083,92919,92851,92838,92686,92594,92588,92506,92345,92270,92248,92215,92173,92096,92021,91982,91934,91856,91802,91797,91628,91582,91578,91542,91504,91427,91422,91410,91328,91220,91103,91090,91039,91023,91020,90938,90933,90918,90916,90908,90866,90859,90721,90610,90594,90563,90466,90464,90349,90324,90259,90227,90206,90191,90096,89946,89873,89858,89786,89595,89542,89484,89409,89401,89391,89272,89266,89199,89185,89183,89146,89106,89019,88999,88857,88745,88709,88698,88454,88357,88345,88314,88208,88198,88197,88194,88158,88054,88052,87847,87764,87737,87713,87554,87519,87511,87455,87350,87252,87217,87090,87065,87033,87026,87017,86983,86949,86907,86871,86843,86716,86714,86650,86572,86529,86453,86389,86379,86337,86214,86117,86031,85948,85924,85896,85876,85809,85756,85728,85699,85549,85548,85445,85408,85353,85301,85288,85230,85174,85171,85089,85085,85016,84977,84784,84774,84758,84677,84670,84614,84581,84573,84405,84302,84214,84210,84051,84028,83991,83965,83920,83916,83875,83858,83848,83815,83813,83671,83614,83475,83340,83307,83277,83270,83264,83031,83018,83013,82913,82812,82521,82506,82485,82409,82406,82402,82317,82315,82306,82265,82189,82078,82077,81941,81799,81691,81668,81652,81630,81616,81586,81501,81489
,81472,81395,81348,81345,81325,81324,81202,81055,81e3,80983,80981,80958,80949,80895,80864,80862,80837,80796,80727,80726,80712,80611,80570,80562,80343,80250,80135,80129,80124,79994,79920,79914,79898,79875,79868,79810,79722,79711,79617,79474,79361,79350,79263,79248,79230,79180,79128,79048,79038,78993,78977,78808,78803,78800,78770,78762,78733,78731,78709,78671,78621,78395,78323,78321,78309,78308,78302,78271,78039,78e3,77946,77865,77765,77548,77519,77425,77402,77397,77380,77361,77315,77151,77117,77060,77007,76996,76963,76842,76745,76699,76684,76639,76638,76554,76547,76508,76404,76385,76339,76321,76307,76288,76257,76204,76159,76073,76035,76029,76026,76020,75937,75919,75626,75559,75519,75467,75421,75381,75366,75342,75313,75288,75284,75257,75145,75105,75085,75041,75009,74976,74880,74837,74758,74720,74692,74677,74651,74635,74596,74556,74521,74470,74306,74303,74250,74241,74154,74112,74056,74054,74045,73961,73935,73911,73741,73637,73604,73580,73574,73570,73566,73397,73254,73109,73069,73062,72907,72871,72791,72623,72608,72601,72583,72575,72563,72451,72391,72389,72244,72238,72218,72186,72173,72115,71904,71817,71807,71718,71688,71658,71624,71606,71527,71379,71341,71340,71306,71268,71151,71136,71121,71093,71081,71028,70972,70939,70936,70788,70757,70690,70676,70602,70370,70312,70299,70265,70261,70130,70070,70041,70010,69977,69930,69877,69859,69735,69692,69690,69685,69668,69648,69645,69624,69563,69508,69374,69323,69301,69186,69086,69064,69025,69020,69011,68885,68862,68835,68833,68827,68683,68644,68477,68416,68365,68343,68328,68244,68185,68180,68149,68139,68134,68126,68073,68012,67974,67957,67806,67805,67772,67718,67717,67703,67682,67619,67541,67476,67404,67320,67203,67170,67157,67127,67096,67057,67004,66932,66910,66888,66887,66817,66731,66729,66706,66698,66682,66647,66627,66595,66559,66558,66542,66511,66491,66392,66354,66353,66322,66285,66196,66095,66081,66001,65991,65921,65883,65870,65831,65802,65779,65680,65648,65501,65470,65407,65378,65112,65063,65025,64965,64954,64949,64895,648
79,64822,64762,64750,64707,64650,64628,64542,64529,64476,64346,64277,64274,64258,64189,64153,64147,64110,64106,63972,63903,63656,63557,63531,63517,63474,63441,63363,63355,63274,63249,63186,63144,63019,63012,62917,62850,62834,62733,62720,62708,62566,62564,62435,62375,62328,62247,62226,62221,62205,62107,61935,61775,61752,61744,61681,61663,61609,61576,61479,61449,61349,61295,61242,61241,61215,61159,61095,61091,60956,60930,60859,60804,60798,60763,60754,60651,60637,60628,60605,60433,60421,60380,60242,60175,60038,59843,59839,59830,59809,59646,59592,59566,59557,59340,59324,59260,59201,59196,59182,59137,59121,59088,59040,59020,58989,58960,58928,58904,58854,58837,58767,58752,58735,58639,58511,58440,58376,58361,58306,58255,58249,58194,58139,58077,57979,57877,57808,57781,57730,57724,57511,57481,57404,57317,57293,57262,57217,57179,57126,57050,56876,56838,56822,56808,56689,56650,56632,56585,56565,56456,56397,56369,56368,56196,56123,56089,56027,56020,56014,55821,55810,55794,55708,55599,55571,55567,55513,55461,55451,55424,55395,55323,55276,55243,55236,55207,55201,55173,55098,55038,55019,54997,54943,54896,54884,54770,54677,54676,54473,54457,54335,54324,54253,54216,54184,54183,54054,54009,53995,53955,53903,53754,53614,53596,53501,53448,53320,53299,53151,53076,53031,53013,53012,52980,52878,52861,52836,52821,52431,52396,52377,52334,52331,52290,52189,52173,52136,52058,52050,52038,51984,51887,51849,51722,51683,51599,51520,51516,51512,51436,51368,51289,51259,51245,51161,51153,51104,51001,50993,50825,50759,50725,50688,50660,50630,50507,50472,50435,50201,50146,50130,50073,50046,49921,49906,49887,49852,49811,49680,49545,49541,49329,49304,49193,49100,49098,49023,48959,48906,48861,48755,48621,48530,48504,48503,48422,48416,48379,48290,48255,48248,48200,48189,48183,47762,47761,47742,47680,47670,47470,47409,47403,47391,47343,47329,47300,47282,47229,47148,47136,47020,47002,46954,46911,46792,46782,46702,46634,46517,46509,46335,46283,46279,46212,45973,45958,45930,45915,45850,45787,45770,45754,45731
,45607,45573,45415,45367,45320,45301,45295,45243,45196,45162,45148,45128,45100,45035,45016,44982,44641,44605,44595,44588,44575,44509,44489,44380,44298,44295,44291,44288,44226,44223,44194,44095,44039,43932,43838,43823,43724,43548,43446,43444,43390,43341,43314,43285,43254,43251,43216,43212,43209,43202,43193,43176,43150,43135,43094,43071,43021,42991,42981,42859,42709,42624,42475,42269,42168,42136,42108,42106,42083,41887,41837,41709,41646,41632,41540,41505,41474,41465,41440,41421,41411,41373,41261,40961,40894,40855,40684,40675,40670,40665,40660,40640,40403,40365,40335,40241,40180,40134,40128,39969,39903,39902,39834,39789,39769,39764,39732,39567,39561,39479,39441,39419,39404,39365,39232,39203,39164,39085,39079,39076,39056,39027,39014,38959,38924,38899,38780,38734,38683,38576,38550,38549,38480,38460,38437,38382,38354,38338,38311,38302,38269,38232,38123,38e3,37998,37867,37656,37458,37455,37348,37316,37242,37209,37200,37136,36931,36912,36842,36799,36784,36719,36696,36695,36679,36523,36422,36297,36270,36249,36202,36174,36162,36151,36092,36069,35985,35984,35896,35894,35890,35873,35837,35828,35812,35794,35714,35669,35662,35468,35425,35412,35375,35320,35303,35259,35163,35112,35098,35096,34987,34962,34940,34933,34900,34848,34784,34707,34553,34458,34429,34428,34397,34299,34266,34264,34257,34228,34213,33970,33936,33783,33529,33501,33482,33457,33426,33414,33389,33355,33352,33338,33297,33291,33183,33141,33063,33043,32881,32852,32736,32729,32691,32689,32545,32528,32517,32498,32477,32397,32305,32275,32131,32090,32063,32047,31986,31891,31824,31787,31779,31724,31704,31700,31664,31514,31499,31496,31460,31442,31434,31423,31354,31340,31335,31307,31296,31163,31162,31118,30808,30744,30732,30711,30677,30582,30529,30506,30459,30449,30382,30381,30309,30296,30238,30182,30033,29908,29892,29607,29585,29583,29519,29478,29362,29242,29223,29191,29182,29172,29169,29064,29052,29034,28989,28890,28822,28741,28726,28655,28597,28567,28557,28478,28380,28330,28307,28197,28196,28156,28050,27998,27918,27863,27
781,27712,27606,27557,27556,27484,27433,27429,27391,27381,27370,27251,27244,27212,27170,27118,26972,26909,26873,26858,26779,26739,26653,26577,26569,26516,26472,26431,26420,26396,26210,26172,26170,26088,26012,25979,25853,25841,25791,25728,25711,25655,25595,25541,25515,25296,25284,25247,25219,25141,25128,25114,25072,25022,25016,25008,24937,24933,24867,24710,24695,24678,24673,24653,24642,24607,24544,24521,24457,24453,24374,24361,24266,24181,24177,24157,24153,24070,24039,23915,23903,23825,23671,23583,23561,23478,23365,23364,23282,23185,23179,23171,23163,23105,23098,23046,22932,22894,22888,22848,22837,22771,22747,22745,22695,22670,22541,22512,22508,22433,22356,22101,22061,21962,21934,21894,21798,21715,21701,21653,21612,21585,21450,21418,21204,21105,21103,21091,21081,20777,20757,20722,20659,20640,20615,20593,20536,20427,20412,20376,20362,20350,20246,20109,19919,19862,19853,19825,19789,19640,19567,19398,19210,19109,19094,19054,19042,18922,18864,18852,18777,18757,18695,18653,18631,18570,18564,18504,18377,18135,18087,17938,17916,17891,17775,17482,17448,17262,17172,17133,17105,17070,17057,17054,17007,16987,16964,16910,16900,16895,16816,16782,16751,16633,16623,16485,16389,16325,16272,16196,16188,16069,16061,15979,15916,15881,15799,15795,15774,15765,15764,15713,15672,15657,15655,15514,15392,15357,15348,15308,15276,15219,15183,15102,15006,14987,14731,14648,14632,14612,14608,14567,14466,14416,14291,14266,14255,14239,14199,14178,14138,14124,13815,13780,13687,13630,13509,13468,13453,13387,13345,13286,13268,13263,13261,13252,13089,13021,13004,12941,12936,12777,12730,12676,12569,12562,12483,12343,12327,12239,12203,12109,11867,11832,11765,11748,11597,11594,11451,11383,11368,11324,11146,11125,11078,11006,11e3,10826,10810,10733,10667,10515,10414,10319,10217,10213,10163,10155,10151,10090,10057,10045,10007,9953,9948,9919,9894,9837,9832,9820,9694,9605,9586,9484,9467,9454,9409,9307,9300,9285,9228,9206,9196,9169,9121,9103,8945,8934,8902,8897,8869,8862,8836,8676,8578,8559,8485,8414,8378,8341,
8231,8184,8158,8087,8080,7974,7969,7921,7823,7790,7787,7771,7716,7651,7609,7455,7398,7315,7304,7289,7286,7275,7151,7118,7100,6937,6905,6860,6855,6798,6794,6739,6713,6599,6568,6551,6435,6415,6381,6341,6314,6205,6165,6151,6132,6093,6060,6008,5967,5952,5886,5884,5879,5870,5834,5750,5659,5619,5497,5479,5466,5453,5428,5424,5377,5347,5185,5169,5071,4937,4886,4873,4804,4788,4623,4615,4592,4541,4499,4486,4427,4411,4267,4207,4146,4067,4062,4036,4004,3933,3838,3767,3749,3697,3615,3604,3511,3495,3414,3410,3409,3330,3306,3288,3158,2977,2886,2869,2858,2736,2732,2730,2504,2499,2486,2383,2375,2219,2158,2155,2152,2110,2056,2048,2029,2025,2020,1910,1908,1853,1828,1701,1680,1657,1625,1556,1482,1443,1390,1371,1361,1295,1014,982,966,940,766,594,591,563,519,466,420,399,318,273,237,224,176,164,77,76,59,53,47,29].indexOf(M))return"-90_cbb";if(M<50472)if(M<25247)if(M<11597)if(M<5886)if(M<2869)if(M<1482)if(M<519)if(M<224)M<76?M<53?M<30?(_=l91(t),t=Ri1(-_),f=Pc2(r),t=cU(r),t=EN(f=t<f)):(f=VV2(),t=Yn(),Hq2(f=t!=f),_=Qn2(r),s=Qt(!_)):M<54?(f=s_1(_),t=e[i++],f?(i+=t,v=uR2(f)):w=10):(f=$y1(u),t=g31(f),t=zv2(f>>>=t),f=r91(z),t=Vr1(f),_=pI2(f=f<=t)):M<164?M<77?(_=s$2(r),s=gY(!_)):(f=du(r),(t=KV(f)).push(f),f=P31(t),f=Nw({})):M<165?(f=xu2(r),t=gt1(s),_=Z71(f^=t),f=Kq(r),t=UT(r),t=K62(f=t<f)):(f=el2(r),t=na(f),f=S92(f=f in t),f=Yi(z),t=Aj(f),_=pb2(f=f<=t));else if(M<399)f=M<273?M<225?(f=go2(r),(t=Bk2(f)).push(f),f=fi2(t),f=rK(t),t=Oi2(f),b71(f=t-f)):(t=v81(h),zf({})):M<274?(_=E02(f),t=xv2(void 0),f=W02(r),t=R2(f),xi(f=f<t)):(f=v2({}),f=g8(r),(t=st1(f)).push(f),Xb2(t));else if(M<466)if(M<400){f=Ia2({}),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return 
u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else _=I21(t),t=dg(-_),f=jc1(_),t=Gr1(u),v=aa1(f*=t);else t=Hl(h),_=bN2(f),t=lt2(void 0);else if(M<982)if(M<766)if(M<591)if(M<520)f=RY(r),t=e[i++],f?w=10:(i+=t,s=Zc(f)),f=Lv(r),t=fC(s),_=I81(f>>=t);else{f=gu2(t),t=B$(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=i02(f),f=$D2(t),t=JY2(f),r=F81(f=t===f)}else M<592?(f=R4(s),t=cm2(f),t=bp2(f=t<=f),f=Iw(r),(t=Df1(f)).push(f),f=ql(t)):(t=YL2([]),f=kx(r),t=J31(s),null!=(r=yS(_)).variablePool?n(r,t,f):r[t]=f);else{if(!(M<966))return;if(M<767)f=mv1(r),t=HD2(s),f=Ha2(_=qn2(f^=t)),t=vT(w),u=gF(f=t<f);else{for(f=UJ(s),t=bF2(_),u=F41(f-=t),f=e[i++],r=tn2(_),g=[],t=0;t<f;t++)g.splice(0,0,gD2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=i12(s)}}else M<1371?M<1295?M<983?(f=ly2(s),t=RV2(_),u=Ou2(f-=t),_=Q_(t),t=cJ2(-_)):(f=ZC2(s),t=Ic1(f),t=p02(f=LE2(t=cd(f=t<=f))),f=tt2(f+=t)):M<1296?(f=Wh(u),t=gt(f),t=nk(f>>>=t),f=$q2(u),t=e[i++],f?w=10:i+=t):(f=fr1(t),t=jn2(f),f=fR(f+=t),f=bs(u),t=nC(v),d=CJ(f|=t)):M<1443?(M<1372?(f=eR2(),t=qv(),f8(f=t!=f),function(){for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,$W2()):s.splice(0,0,uL2());for(f=jH2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}):(f=Ud1(u),t=Hm(v),d=wH2(f|=t),function(){for(z.for_in_xh_cbb_list=p,f=e[i++],r=L12(_),g=[],t=0;t<f;t++)g.splice(0,0,aE2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=S(s)}))():(f=ie({}),f=MV(t),t=Q1(f),s=PB2(f=t==f));else if(M<2110)if(M<1908)if(M<1680)if(M<1625)M<1483?(f=Ut2(r),t=xo(r),t=oG(f=t<f),f=wu1(u),t=NB2(v),d=es2(f|=t)):(f=ed1(r),t=qD2(s),_=Je2(f%=t),f=uH(r),t=CD2(s),_=uX(f^=t));else 
if(M<1626){for(z.for_in_xh_cbb_list=p,f=e[i++],r=iK2(_),g=[],t=0;t<f;t++)g.splice(0,0,So2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=_71(s),f=J22(),t=b1(),PN2(f=t!=f)}else f=ye1(t),t=f_2(f),r=Yi1(f=t===f),f=C7(t),t=QG2(f),f=C3(f=t-f);else M<1828?M<1681?(f=KQ(z),t=tM2(f),_=eD(f=f<=t),f=mr1(t),t=ak(r),r=Ve(f&=t)):(f=Qr(t),r=e[i++],f?i+=r:w=9,f=iv1(t)):M<1829?(t=_l1(h),_=Zk2(r),s=p51(!_)):(f=AB(t),f=oQ(_),t=e[i++],f?(i+=t,v=Dr2(f)):w=10);else if(M<2029)M<2020?M<1909?(f=mK2(r),t=tw1(f),f=hs2(f=f<t),f=pF2(_),t=A71(w),u=ro2(f=t<f)):(f=nq(r),t=hK2(r),t=qh2(f=t<f),f=__2(r),t=e[i++],f?w=10:(i+=t,s=HI2(f))):M<2021?(f=uZ2(r),t=eZ(s),_=N5(f%=t),f=kb1(s),t=sf1(_),u=S5(f-=t)):(f=k62(t),t=Cf1(f),r=Zg2(f=t===f),f=qJ(_),t=J71(u),f=Va(f=t!==f));else if(M<2056){if(M<2030)return f=H02(r),t=eq(s),void(_=N6(f>>=t));f=E(_),t=dh1(u),v=AP(f*=t),f=WT2(r),t=J12(s),null!=(r=Zt(_)).variablePool?n(r,t,f):r[t]=f}else f=a91(t),f=RD2(t);else if(M<2486)if(M<2219)if(M<2155)s=(M<2111?(f=tV2(u),t=_j(f),t=lP2(f>>>=t),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),Rd2):(t=Lj(h),f=mc1(t),t=Ht1(f),f=t==f,Nd))(f);else if(M<2156)f=xV2(t),t=ct(f),s=$f2(f=t==f),f=BD2(_),t=y01(u),f=h1(f=t!==f);else{for(d=c.CFf,f=0;f<d;f++){let c=v=yz(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=mN(t),t=_T2(f),r=dy(f/=t)}else 
M<2383?M<2220?(f=d52(l=z),f=NV(r),t=b$2(s),_=gY2(f%=t)):(f=GU2(t),t=__1(h)):(f=YZ(r),t=cK(s),_=wa(f^=t),f=PB(r),t=G$2(s),_=jU(f%=t));else if(M<2732)if(M<2504)if(M<2487){for(f in p=[],t=ED2(t))p.push(f);z.for_in_xh_cbb_list=p}else f=e[i++],t=lh2(f),f=qs1(s),t=Kv1(f),t=EF2(f=t<=f);else{if(M<2505)return void(f=yb1({}));f=A82(s),t=M1(f),f=x21(f=(t=SQ2(f=Sd1(t=Kl(f=t<=f))))-f)}else if(M<2858)if(M<2733){for(f=e3(),t=Dc(),YK(f=t!=f),f=e[i++],r=Q71(_),g=[],t=0;t<f;t++)g.splice(0,0,Z12(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=Z81(s)}else f=J5(t),t=NN(f),r=N8(f/=t),_=iU2(t),t=C12(-_);else f=Ul(z),t=Az(f),_=TS2(f=f<=t),f=e81(r),t=e[i++],f?w=10:(i+=t,s=Yb2(f));else if(M<4499)if(M<3749)if(M<3410)if(M<3288)M<2977?M<2870?(f=RF(),t=A3(),L32(f=t!=f),f=yr2(s),t=kN2(_),u=KN(f-=t)):(f=GW2(t),t=pw2(f),f=NH(f=t-f),f=J4(r),t=If1(s),_=yM2(f%=t)):M<2978?(f=RH(r),t=e[i++],f?w=10:(i+=t,s=ie2(f)),_=ds(r),s=rb1(!_)):(f=wY2(t),t=Yv2(f),r=R71(f/=t),f=b6(_),t=Oo(w),u=Wc1(f=t<f));else if(M<3330)if(M<3289)f=hF2(t),t=Jn(f),r=yq(f=t===f),f=h2(_),t=qw1(u),f=W0(f=t!==f);else for(f=Jp2(_),t=tG(u),v=_1(f*=t),d=c.CFf,f=0;f<d;f++){let c=v=Ma2(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else M<3331?(t=aO2(h),f=fU(t),t=Fn2(f),s=af2(f=t==f)):(t=z52(h),_=BQ2(r),f=DH(typeof _));else 
if(M<3604)M<3495?M<3411?(f=t51(r),t=LK2(f),f=TT2(f=f<t),f=jP2(r),(t=pt1(f)).push(f),f=FN2(t)):(f=Uc1(r),t=hl(s),_=yJ(f>>=t),f=$y2(t),t=kP(f),s=Cf2(f=t==f)):d=(M<3496?(f=N82(t),t=dW(f),f=f4(f+=t),f=xn(u),t=bY2(v),f|=t,ar1):(f=KK2(_),t=kV(u),v=Na(f*=t),f=f32(u),t=tm2(v),f|=t,kw1))(f);else if(M<3697)M<3605?(f=Sp2(r),t=w51(r),t=yw1(f=t<f),f=sk2(u),t=uO(v),d=fm2(f|=t)):(f=qA(t),t=Vf2(r),r=_8(f&=t));else{{f=d0(t),t=mh2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=ol1(f)}f=qk2(t),t=rn2(f),f/=t,r=eo1(f)}else if(M<4067)if(M<4004)if(M<3838)if(M<3750)for(d=c.CFf,f=0;f<d;f++){let c=v=u52(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else f=k7(_),t=SB2(u),f=k_2(f=t!==f),t=Kf1(h);else M<3839?(f=TV2(w),r=e[i++],f[constantPool[r]]+=1,f=on(r),t=ty1(t),f=lT(f<<=t)):t=E12([]);else if(M<4062)if(M<4005)for(f=iq2(t),t=Lm(f),f=$s1(f=t-f),f=c.CFf,t=0;t<f;t++)w=Gq2(t),z[w]=o[t];else f=dn1(u),t=e[i++],f?w=10:i+=t,f=eW2(u),t=s6(f),t=Ov(f>>>=t);else for(f=$E2(r),t=pe(r),t=y41(f=t<f),d=c.CFf,f=0;f<d;f++){let c=v=ql1(f);l[c]=function(){var b=new 
cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else M<4411?M<4207?M<4068?(f=va2(_),t=Yh(u),v=st(f*=t),f=_t(r),t=d9(s),_=M02(f%=t)):(f=HT2(t),t=Fe(f),s=ZZ(f=t==f),f=Ej(w),r=e[i++],f[constantPool[r]]+=1):f=M<4208?(f=Dc2(s),t=Du(_),u=rr2(f-=t),mk(typeof(_=Qi2(r)))):(f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=Hs2(f),Km(t)):M<4486?M<4412?(f=uu2(r),t=e[i++],f?w=10:(i+=t,s=Zr(f)),f=uN2(t),t=A21(f),f=zy(f+=t)):(f=e[i++],t=pR2(r),r=yf1(f),t[constantPool[f]]=r):(f=qf1(r),t=rf2(f),f=Pq2(f=f<t),f=oE2(t),t=L$2(r),null!=(r=Hn(s)).variablePool?n(r,t,f):r[t]=f,f=KG2(r));else if(M<5377)if(M<4873)M<4623?M<4592?M<4500?t=R0(h):(f=_H2(_),t=e[i++],f?(i+=t,v=JN(f)):w=10):M<4593?(f=ic1(t),t=U52(f),f=su2(f=t-f),t=gv(h)):(f=Mf1(_),t=jt(w),u=j3(f=t<f),f=s22(t),t=G91(f),f=Nw2(f=t-f)):M<4804?M<4624?(f=Eb2(t),t=mb1(f),r=Lt1(f=t===f),f=_Q(_),t=Sq(r),null!=(r=aV(f)).variablePool?n(r,t,f):r[t]=f):(f=IJ2(t),t=cC2(f),y=ST2(f=ER(f+=t)),null!=(P=VR(t)).variablePool?n(P,y,v):P[y]=f):(f=wn2(_),t=Xc2(u),v=a32(f*=t),t=DY2(h));else if(M<5169)M<4937?M<4874?(f=ci(r),t=Af1(s),_=R9(f^=t)):(f=D81(t),t=Jn2(f),f=ZG2(f+=t),f=tf(_),t=jk(u),f=m4(f=t!==f)):f=M<4938?(f=c71(u),t=Vl1(v),d=HL(f|=t),f=t12(t),t=zp2(f),Tt(f=t-f)):(f=xw1(r),t=jt2(f),r=delete t[f],f=nc2(r),Cu1({}));else if(M<5347)if(M<5170)for(f=vO(r),t=Sc(s),_=Nl2(f%=t),d=c.CFf,f=0;f<d;f++){let c=v=Lw(f);l[c]=function(){var b=new 
cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else _=nk2(r),s=kM2(!_),f=ih2({});else _=Rp(r),s=vy1(!_);else if(M<5619)if(M<5466)if(M<5428){if(M<5378)return _=fw2(f),void(t=WA(void 0));f=MC2(_),t=Z_1(w),u=gC(f=t<f),f=Mw(z),t=zq2(f),_=HW(f=f<=t)}else M<5429?(_=JE2(r),s=J2(!_),f=A62(),t=uG(),Oc2(f=t!=f)):(f=dk2(),t=gX2(),rB(f=t!=f),f=ba1(w),r=e[i++],f[constantPool[r]]+=1);else M<5497?M<5467?(f=bS2(t),t=ze2(f),s=Wf2(f=t==f),_=Qt2(f),t=WR2(void 0)):(_=G4(t),t=vd(-_),f=bd1(r),t=e42(s),_=NL2(f>>=t)):(f=t_1(l=z),f=X41({}));else if(M<5870)M<5750?M<5620?(_=O21(r),s=Jg2(!_),f=gG(z),t=pF(f),_=zc2(f=f<=t)):(f=Rs2(_),t=Ok(r),null!=(r=Xl2(f)).variablePool?n(r,t,f):r[t]=f,f=Lt(t),t=Oh(r),r=Kj(f&=t)):M<5751?(_=YP1(r),f=vr(~_),f=kl2(t),t=_h2(f),r=Jn1(f=t===f)):(t=PU(h),f=Vb2(),t=ae1(),M8(f=t!=f));else if(M<5884)if(M<5871)f=$W(r),t=gm(r),t=ok2(f=t<f),f=U61(z),t=nF(f),_=GQ2(f=f<=t);else{{for(f=e[i++],r=J32(_),g=[],t=0;t<f;t++)g.splice(0,0,KH(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=VN2(s)}f=mV(_),t=id2(u),f=t!==f,f=KS(f)}else f=Wt(r),t=bv1(t),f=lK(f<<=t),f=Mc1(z),t=J_1(f),_=_P2(f=f<=t);else 
if(M<8676)if(M<7275)if(M<6551)if(M<6165)if(M<6060)M<5967?M<5887?(f=Pn(s),t=F4(_),u=rg2(f-=t),_=iy(r),s=Qx(!_)):(t=em([]),f=Wg2(u),t=B32(v),d=_K(f|=t)):M<5968?(f=SZ(t),t=_u(f),r=WG2(f/=t),f=pV2(t),t=Wo2(f),r=_v2(f/=t)):(f=Va1(t),t=JU(r),f=fd1(r=K91(f&=t)),t=ly(s),_=Jy2(f>>=t));else if(M<6132)if(M<6061){for(f=qT2(r),t=e71(s),_=S32(f>>=t),z.for_in_xh_cbb_list=p,f=e[i++],r=$t1(_),g=[],t=0;t<f;t++)g.splice(0,0,FM(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=_c(s)}else f=LH2(r),t=Ea(f),f=D71(f=f in t),f=Jk2(r),t=oO(s),_=ik(f>>=t);else M<6133?(f=u5(r),t=t72(r),t=Iq2(f=t<f)):(f=En(u),t=wa2(f),f=cr(t=Ns(f>>>=t)),t=LX2(r),r=og(f&=t));else if(M<6381)M<6314?M<6166?(y=rq2(f),null!=(P=tW2(t)).variablePool?n(P,y,v):P[y]=f,f=od2(u),t=Fg2(v),d=ta2(f|=t)):(f=gs2(u),t=$X(f),t=OX(f>>>=t)):M<6315?(f=MJ2(r),t=ml2(f),f=JV(f=f<t),f=Go1(t),t=fJ(s),v=oV(f=f instanceof t)):(f=e[i++],i+=f,f=b91(t),t=HU2(f),s=IR(f=t==f));else if(M<6435)M<6382?(f=rp2(t),t=Nn1(s),v=nt(f=f instanceof t)):(f=K$2(s),t=Ym2(f),t=j_(f=t<=f),f=z0(u),t=JH(v),d=eH(f|=t));else{for(f=Dw2(t),t=Gv2(f),f=OB2(f=t-f),f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,lI()):s.splice(0,0,p$2());for(f=r71(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}else if(M<6855)M<6739?M<6599?M<6552?(f=p6(u),t=e[i++],f?w=10:i+=t,f=ff(r),t=Bi2(s),_=ZS(f%=t)):(_=EB2(r),s=$A(!_),f=ry(t),t=D42(r),r=Df(f&=t)):t=M<6600?(f=b62(r),t=dB(f),f=Kb2(f=f<t),f=u91(u),t=V5(f),xR(f>>>=t)):(f=e[i++],t=UG(constantPool[f]),C31([])):M<6798?M<6740?(f=PD2(t),t=FI2(f),f=jb1(f=t-f),f=Sf1(_),t=dF(u),f=rb2(f=t!==f)):(f=Xu1(_),t=Ki1(u),f=_q(f=t!==f),_=Vt2(f),t=$_2(void 0)):(f=Bx(r),t=Fc2(s),_=q41(f%=t),f=Uh2(s),t=EU2(f),t=OG2(f=t<=f));else if(M<7100)if(M<6905)if(M<6856){for(f in f=hm(t),t=DA(f),f=UU(f=t-f),p=[],t=Me1(t))p.push(f);z.for_in_xh_cbb_list=p}else f=OM(t),t=Ni2(f),f=gb(f=t-f),f=ar2(t),t=hf1(r),r=xa1(f&=t);else 
M<6906?(f=n82(r),t=ee(f),f=NS2(f=f<t),f=Us1(u),t=e[i++],f?w=10:i+=t):(f=It1(u),t=Tz(f),t=Fk(f>>>=t),f=Sl1(r),t=vE(f),f=l7(f=f<t));else M<7151?f=M<7101?(f=me1(u),t=Ta(f),pl2(t=hS2(f>>>=t))):(f=Go2({}),f=Sf2(r),t=Ez2(f),gy(f=f in t)):(f=oh2(r),t=Ct1(t),f=kE2(f<<=t),f=N12(t),t=D8(f),s=Hz(f=t==f));else if(M<7921)if(M<7609)M<7315?M<7289?M<7276?(f=xZ(s),t=M6(_),u=zD2(f-=t),f=_p2(t),t=YR(f),r=Os1(f=t===f)):(f=u61(_),t=bL2(r),null!=(r=aJ2(f)).variablePool?n(r,t,f):r[t]=f,f=eC2(z),t=Zb(f),_=iE(f=f<=t)):M<7290?(_=l51(t),t=z4(-_),f=X61(t),t=Pb1(f),f=gN(f+=t)):(f=bT2(t),t=n41(f),r=LW2(f=t===f),f=r01(t),t=$d1(r),r=Bi(f&=t)):M<7455?M<7316?(f=mZ2({}),f=op2(r),t=e[i++],f?w=10:(i+=t,s=R$2(f))):(f=Lk2(u),t=kx2(v),d=Gz(f|=t),f=LN(r),t=u02(s),_=jC2(f^=t)):(f=Xv(_),t=LW(u),v=eB(f*=t),_=fV2(r),f=j$(typeof _));else if(M<7787)M<7716?M<7610?(f=mn(t),t=MN2(f),s=Xa2(f=t==f),f=Z62(r),t=vd2(s),_=M82(f^=t)):(f=kn(_),t=i_2(u),v=q62(f*=t),f=_I(l=z)):M<7717?(f=yr1(t),t=E_2(f),s=A52(f=t==f),f=NI2(r),t=Ye1(t),f=ua(f<<=t)):(f=Ga(r),t=iG(t),f=Kb1(f<<=t),f=iO2(r),t=TK2(s),_=C72(f^=t));else if(M<7823)if(M<7788)f=Aw1(t),t=mw(f),f=DP2(f=t-f),f=Ax(t),t=Bh2(f),s=a5(f=t==f);else{for(f=O9(z),t=IW2(f),_=t71(f=f<=t),f=e[i++],r=SJ2(_),g=[],t=0;t<f;t++)g.splice(0,0,bo2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=yH(s)}else f=e[i++],t=u71(constantPool[f]),f=V9(r),t=tX2(s),_=Pn2(f>>=t);else if(M<8231)if(M<8087)if(M<7974)if(M<7922)t=DN(h),_=MJ(f),t=Kh1(void 0);else{for(z.for_in_xh_cbb_list=p,f=e[i++],r=mE2(_),g=[],t=0;t<f;t++)g.splice(0,0,Uw2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=J72(s),f=X01(r),t=re2(s),_=CQ(f>>=t)}else 
if(M<7975){f=Z6(_),t=L51(u),f=Jb1(f=t!==f),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else f=pi(_),t=tu(r),null!=(r=Jr1(f)).variablePool?n(r,t,f):r[t]=f,f=of2(r),t=vy(s),_=F1(f^=t);else M<8184?M<8088?(f=Bs(t),t=R31(f),f=gr2(f=t-f),f=_91(_),t=ey1(r),null!=(r=DE(f)).variablePool?n(r,t,f):r[t]=f):(_=Bp(f),t=t$(void 0),f=e[i++],t=cI(r),r=to2(f),t[constantPool[f]]=r):(f=e[i++],t=ri1(r),r=Fa(f),t[constantPool[f]]=r,_=za(f),t=dQ(void 0));else if(M<8485)M<8378?M<8232?(t=il([]),f=Bc2(t),t=Si1(f),r=ux2(f/=t)):(f=Dt1(r),t=rQ(f),f=FY2(f=f<t),f=un(_),t=_y(w),u=Tq(f=t<f)):M<8379?(f=RC2(s),t=D72(_),u=Xo1(f-=t),f=db(t),t=Iy(f),r=CI2(f=t===f)):(f=OI(r),t=uB(r),t=PL(f=t<f),f=Ks1(l=z));else if(M<8578)if(M<8486){f=AI({});throw f=Dx(t)}else f=D3(s),t=Wx(_),u=Rx2(f-=t);else f=Vh2(),t=xO2(),lb1(f=t!=f),_=u$(r),s=$z(!_);else if(M<9948)if(M<9300)if(M<9103)if(M<8897)M<8862?M<8677?(f=gB2(r),t=Ul2(f),r=delete t[f],f=o9(r),f=lp2({})):(f=sm2(s),t=rF(_),u=j81(f-=t),f=zY(s),t=fw(f),t=YQ2(f=t<=f)):M<8863?(f=H8(r),t=qm(r),t=DU2(f=t<f),f=V91(r),r=e[i++],--f[constantPool[r]]):(t=gj(h),f=IU(r),t=OF(f),r=delete t[f],f=m0(r));else if(M<8934)M<8898?(f=Hg2(s),t=z5(_),u=Ri(f-=t),f=iV(t),t=yp(f),r=JJ(f/=t)):(f=jn(t),t=r1(f),r=sD(f=t===f),f=wz(t));else if(M<8935)f=EP(z),t=Co(f),_=LM2(f=f<=t),f=oi2(r),(t=nM(f)).push(f),f=cY2(t);else{for(f=WR(t),t=Iq(f),r=Ad1(f/=t),z.for_in_xh_cbb_list=p,f=e[i++],r=KY(_),g=[],t=0;t<f;t++)g.splice(0,0,cb1(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=FH2(s)}else 
M<9206?M<9169?M<9104?(f=Qf(t),t=Hv2(r),r=gT(f&=t),f=qU2(l=z)):(f=TB2(r),t=eG(f),f=Xj2(f=f in t),f=TP(t),t=Cx(f),s=xU(f=t==f)):M<9170?(f=Ua2(_),t=KJ(u),f=LS(f=t!==f),f=Mi(t),r=e[i++],f?i+=r:w=9):(f=rO2(r),t=La(s),_=cq2(f%=t),f=uJ2(t)):M<9285?M<9207?(f=w11(t),t=QK2(r),null!=(r=g92(s)).variablePool?n(r,t,f):r[t]=f,f=Ao(r),_=x7(r),s=Ei2(!_)):(f=B81(s),t=X6(f),t=Us2(f=t<=f),f=jh(),t=ZG(),Qs1(f=t!=f)):(f=rr(t),t=Vx2(f),f=Uf(f=t-f),f=Zf(t),t=u9(f),r=m_(f=t===f));else if(M<9605)if(M<9467)M<9409?M<9301?(f=Cl1(t),t=Q0(r),r=En2(f&=t),f=OK2(s),t=jB2(f),t=Iv2(f=t<=f)):(t=Yq([]),f=mX(t),t=dE(f),f=Ur1(f+=t)):r=(M<9410?(f=K81(_),t=e[i++],f?(i+=t,v=zV(f)):w=10,f=K2(t),t=pw(f),Rj):(f=yD2(r),t=If2(f),f=si2(f=f in t),f=gA(t),t=mw2(f),tn))(f=t===f);else if(M<9586){if(M<9468)return f=_m(t),t=e91(r),void(r=Z0(f&=t));f=ty(t),t=OF2(f),s=Cb1(f=t==f),f=pz2(r),t=lY2(f),f=T_1(f=f in t)}else f=Z$(s),t=eg2(_),u=jl2(f-=t),f=_02(r),r=e[i++],--f[constantPool[r]];else if(M<9837)if(M<9820)if(M<9606){t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}t=Mh1([])}else f=Oq(),t=R32(),rD(f=t!=f),f=_21(t),t=th2(f),f=Lc2(f=t-f);else if(M<9821)f=UE2(_),t=KF2(u),f=Qq2(f=t!==f),f=e[i++],t=nP2(constantPool[f]);else{for(f=c.CFf,t=0;t<f;t++)w=HE2(t),z[w]=o[t];f=Cp(),t=B01(),f=t!=f,Lr2(f)}else M<9919?M<9838?(_=i5(r),f=Zn2(~_),f=p92(s),t=Yw2(f),t=FM2(f=t<=f)):(f=S_2(s),t=R_2(f),t=Qu1(f=t<=f),f=rU(s),t=X92(_),u=VZ2(f-=t)):(t=zx2([]),f=g2(_),t=CQ2(w),u=V31(f=t<f));else 
if(M<10667)if(M<10155)if(M<10057)M<10007?M<9949?(t=lC([]),f=FL2(w),r=e[i++],f[constantPool[r]]+=1):(f=d5(w),r=e[i++],f[constantPool[r]]+=1,f=nf1({})):M<10008?(f=da2(r),t=CL2(t),f=XV(f<<=t),f=E7(t)):(f=Q5(t),t=QC2(f),r=qb(f=t===f),f=Gf(t),t=Ad2(f),s=kl(f=t==f));else if(M<10151)if(M<10058){f=xf1(t),t=gP2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=_81(f),f=ym(t),t=SW2(f),s=zf1(f=t==f)}else f=e11(r),t=Rb1(s),_=KI(f>>=t);else f=PS2(t),t=iR2(r),r=dq2(f&=t),t=Lr1([]);else if(M<10319)if(M<10213)M<10156?(f=aJ(t),f=cR(_),t=lG(u),v=ld(f*=t)):(f=X31(s),t=ZY(_),u=kK(f-=t),f=q81(t),t=pr1(f),r=Ds(f/=t));else if(M<10214)f=Ai1(r),t=vb2(r),t=cz(f=t<f),y=ve2(f),null!=(P=ES2(t)).variablePool?n(P,y,v):P[y]=f;else{for(f in f=dE2(z),t=El1(f),_=TE2(f=f<=t),p=[],t=O_(t))p.push(f);z.for_in_xh_cbb_list=p}else{if(!(M<10515))return f=Jb(r),t=N0(s),void(_=Wx2(f^=t));M<10320?(t=Ru2([]),_=hM(r),f=J62(~_)):(f=jd(t),t=iI(f),s=nW(f=t==f),f=Ir(r),t=O92(s),null!=(r=zU2(_)).variablePool?n(r,t,f):r[t]=f)}else if(M<11125)if(M<11e3)if(M<10810)M<10668?(f=Yn1(r),t=L5(s),f=Ac2(_=qN(f^=t)),t=e[i++],f?(i+=t,v=LJ2(f)):w=10):(f=HJ(t),t=ic(f),f=sM(f+=t),f=Ku1(_),t=$r2(r),null!=(r=si1(f)).variablePool?n(r,t,f):r[t]=f);else if(M<10811){t=Hm2([]),f=yu(t),t=_L2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=D41(f)}else f=SU(z),t=yf2(f),f=uc(_=EH(f=f<=t)),t=e[i++],f?(i+=t,v=Ae(f)):w=10;else M<11078?M<11001?(f=xs(t),t=eq2(r),r=e02(f&=t),f=HQ(t),t=fe(r),r=wB(f&=t)):(f=IR2(),t=gw2(),pM2(f=t!=f),f=BM2(t),t=q6(f),s=ay2(f=t==f)):(f=ee1({}),f=nh2(t),r=e[i++],f?i+=r:w=9);else M<11383?M<11324?t=M<11126?(f=d4(_),t=XO2(r),null!=(r=Ui(f)).variablePool?n(r,t,f):r[t]=f,_=V22(f),yI(void 0)):(f=cb2(r),t=Ex(r),t=sH(f=t<f),Ny(f=e[i++])):M<11325?(t=Y5(h),f=lU(),t=DB(),fR2(f=t!=f)):(t=nQ(h),_=pD(r),s=ef2(!_)):M<11594?f=M<11384?(f=CS2(t),r=e[i++],f?i+=r:w=9,f=Pw2(r),t=iH2(f),FA2(f=f<t)):(f=Nb2(r),t=nv1(t),f=pQ(f<<=t),sP2({})):(f=e[i++],t=aq(f));else 
if(M<18757)if(M<15348)if(M<13468)if(M<12777)if(M<12327)if(M<11867)if(M<11765)M<11598?(f=Xr2(z),t=qM2(f),_=Cg(f=f<=t),t=LU([])):(f=Yb(r),t=ap2(s),f=Vo1(_=EA2(f%=t)),t=n91(r),null!=(r=U21(f)).variablePool?n(r,t,f):r[t]=f);else if(M<11766)f=hE(l=z),f=zu1(t),t=Yx(f),s=Mw1(f=t==f);else{for(f in f=Ew2(t),t=Yl1(f),f=QY(f=t-f),p=[],t=uz2(t))p.push(f);z.for_in_xh_cbb_list=p}else if(M<12203)if(M<11868){for(f in p=[],t=Mn1(t))p.push(f);z.for_in_xh_cbb_list=p}else f=_d1(z),t=ix2(f),_=c5(f=f<=t),f=lH(t),t=wT(f),f=zH(f+=t);else if(M<12204)f=cn2(r),t=GE2(r),t=ge1(f=t<f),f=bV2(r),t=B92(s),_=Qd1(f>>=t);else for(f=On1(t),t=I91(f),r=Y62(f/=t),d=c.CFf,f=0;f<d;f++){let c=v=Uo2(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else M<12569?M<12483?t=M<12328?(f=BE2(t),t=n01(r),r=Tl(f&=t),_=ky(f),fD2(void 0)):(f=PZ(r),t=xs1(s),_=xA2(f%=t),f=ub2(u),t=Tr2(f),b22(f>>>=t)):M<12484?(_=pd2(r),f=mL(typeof _),_=OO2(f),t=M31(void 0)):(f=KX(s),t=zg2(_),f=wf1(u=Mo2(f-=t)),t=j6(v),d=P6(f|=t)):M<12730?M<12570?(_=a01(r),s=X_1(!_),f=TQ2(t),t=hR2(f),r=rA(f/=t)):(f=_i1({}),f=T_2(_),t=kd2(u),f=mn2(f=t!==f)):(f=i42(s),t=I$2(_),u=uA2(f-=t),f=uh1(z),t=j7(f),_=Zm2(f=f<=t));else if(M<13261)M<13021?M<12941?t=M<12778?(f=rc(z),t=pk2(f),_=V$(f=f<=t),_=vn(f),nP1(void 
0)):(function(){for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,Oy1()):s.splice(0,0,vn2());for(f=VS(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}(),jl(h)):M<12942?(f=Vc1(t),t=ZB(r),r=Uy2(f&=t),f=sw2(_),t=Pb(u),f=We2(f=t!==f)):(f=JW(r),t=D4(t),f=S22(f<<=t),f=yk(_),t=Jh1(u),v=Cj(f*=t)):f=M<13252?M<13022?(function(){for(f=c.CFf,t=0;t<f;t++)w=Cs(t),z[w]=o[t]}(),DB2({})):(f=j01(t),t=i62(f),s=VE2(f=t==f),f=sv2(t),t=v22(f),Mx2(f=t-f)):(f=LS2({}),f=H4(_),t=Uh1(u),BS(f=t!==f));else if(M<13345)if(M<13268)M<13262?(f=AU2(r),t=I1(s),_=Cm(f>>=t),t=wI2(h)):(f=co2(),t=e_1(),v$2(f=t!=f),f=Ix(t),t=cR2(f),f=Bx2(f+=t));else if(M<13269){f=o12(),t=tW(),f=t!=f,JS(f);{for(z.for_in_xh_cbb_list=p,f=e[i++],r=DM2(_),g=[],t=0;t<f;t++)g.splice(0,0,GR2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=Ae1(s)}}else f=iz2(u),t=qc1(f),t=pz(f>>>=t),f=yU2(r),t=Ud(s),_=rn(f^=t);else M<13453?M<13346?(f=g52(t),t=lN(f),r=FF(f=t===f),t=hv1(h)):(f=D6(r),t=nj(s),f=I3(_=Th(f>>=t)),t=ig(w),u=Cr(f=t<f)):(f=z72(r),t=Md(f),f=b4(f=f<t),f=PO2(r),t=iP1(s),_=iy2(f^=t));else if(M<14466)if(M<14178)if(M<13780)if(M<13630)if(M<13469)f=qI2(u),t=RP1(v),d=pH(f|=t),_=nm2(r),f=nC2(typeof _);else{for(f in f=Cu2({}),p=[],t=Y12(t))p.push(f);z.for_in_xh_cbb_list=p}else M<13631?(f=RS2(t),t=Ip(f),f=ny1(f=t-f),_=zu(r),s=Eu2(!_)):(_=CP2(f),t=b3(void 0),f=Y7(r),t=uW(f),f=yh(f=f<t));else M<14124?M<13781?(f=e[i++],i+=f,f=RI(),t=Ys2(),SX2(f=t!=f)):(f=Xz(),t=xk(),R22(f=t!=f),f=E11(u),t=_92(f),t=FP2(f>>>=t)):M<14125?(f=ww1({}),f=r82(r),t=k2(f),f=gg2(f=f<t)):(t=Jp([]),f=z_1(w),r=e[i++],f[constantPool[r]]+=1);else M<14266?M<14239?M<14179?(f=Ih2(t),t=ab(r),r=XR2(f&=t)):(f=_z2(t),t=de(f),s=ZL2(f=t==f),f=V_(t),t=yF2(f),f=Sa1(f=t-f)):M<14240?(f=TH(t),f=h52(s),t=kK2(f),t=F51(f=t<=f)):(f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=nY2(f),f=OK(t),t=l1(f),f=Wt1(f+=t)):M<14416?M<14267?(f=xY({}),_=$Z(f),t=Mr2(void 
0)):(f=cl(_),t=e[i++],f?(i+=t,v=u$2(f)):w=10):(_=w71(r),s=Ru1(!_),_=O41(f),t=DE2(void 0));else if(M<14987)if(M<14632)M<14608?M<14467?(f=rd1(t),t=uK(f),r=NO2(f=t===f),f=l5(t),t=S12(s),v=td2(f=f instanceof t)):(f=J6(r),t=$k2(t),f=jM(f<<=t),_=XU2(r),f=Zb1(typeof _)):M<14609?(f=Ld1(z),t=PA2(f),_=qi2(f=f<=t),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=aD(f)):(f=ps(t),t=xf2(f),r=qL(f/=t));else if(M<14731)if(M<14633){for(f in f=Ss2(r),t=aw(s),_=jG2(f>>=t),p=[],t=$92(t))p.push(f);z.for_in_xh_cbb_list=p}else f=ot(r),t=vo1(s),_=Mk(f^=t),f=Ff(),t=Da1(),W51(f=t!=f);else f=kO(t),t=S1(f),f=nm(r=a_(f=t===f)),t=df(s),_=p4(f%=t);else if(M<15219)if(M<15102)f=(M<14988?(f=lv2(r),t=iL2(s),f=m$2(_=nl2(f%=t)),t=t92(u),f=t!==f,V11):(f=e[i++],t=nB(f),f=aA2(_),t=l82(u),f=t!==f,Qj2))(f);else if(M<15103)f=Dz2(),t=qb1(),p3(f=t!=f),f=Ph(r),t=Dx2(s),_=g12(f%=t);else{t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=NC2(t),t=n81(f),s=m51(f=t==f)}else M<15308?M<15220?(f=lB2(t),t=O1(f),s=Sw(f=t==f),f=Ve1(r),(t=e31(f)).push(f),f=zO2(t)):(f=f42(t),f=W62(_),t=Fw2(u),v=Pe1(f*=t)):(f=e[i++],t=ev1(r),r=zC(f),t[constantPool[f]]=r,t=I31([]));else if(M<16895)if(M<15979)if(M<15764)if(M<15655)if(M<15392)if(M<15349){for(d=c.CFf,f=0;f<d;f++){let c=v=H62(f);l[c]=function(){var b=new 
cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=CF2(r),t=yy(s),_=WK2(f^=t)}else _=m22(f),t=hx(void 0),t=Dg2(h);else M<15393?(t=sw([]),f=tx(u),t=e[i++],f?w=10:i+=t):(f=e32(_),t=VA(w),u=cl2(f=t<f),f=uw(t),t=S7(f),r=XS2(f/=t));else M<15672?M<15656?(f=r5(r),t=R92(s),_=U92(f%=t),f=dl(u),t=_O(v),d=u51(f|=t)):(f=x02(r),t=A$(s),f=LB2(_=yd2(f^=t)),t=Y71(u),v=ne2(f*=t)):f=(M<15673?(function(){for(f in p=[],t=BX(t))p.push(f);z.for_in_xh_cbb_list=p}(),f=QK(r),t=Of2(t),f<<=t,WH):(_=ar(r),s=Nu1(!_),f=am(t),t=U22(f),f+=t,GA2))(f);else M<15799?M<15774?M<15765?(f=ox(u),t=_e(v),d=Ca1(f|=t),f=c32(t),r=e[i++],f?i+=r:w=9):(_=JC(f),t=CJ2(void 0),f=GP(r),t=BX2(s),_=C22(f%=t)):M<15775?(f=jq2(u),t=mY2(f),t=lE(f>>>=t),f=e[i++],t=Q2(constantPool[f])):(f=lc1(s),t=gd1(_),u=Me2(f-=t),f=e[i++],t=LL2(r),r=wv1(f),t[constantPool[f]]=r):M<15916?M<15800?(f=Ql(t),t=rH(r),r=lu1(f&=t),f=yP(u),t=Tv2(v),d=u01(f|=t)):(f=tt1(s),t=M92(f),t=Sr2(f=t<=f),y=tR2(f),null!=(P=uK2(t)).variablePool?n(P,y,v):P[y]=f):(f=ul1(),t=JI2(),u72(f=t!=f),f=YJ2(r),t=yN(t),f=Lo1(f<<=t));else if(M<16389)M<16196?M<16069?M<15980?(f=yt(t),t=Sc1(f),r=dR2(f/=t),f=zh2({})):(f=Hy2(r),t=tp(f),r=delete t[f],f=jE2(r),_=cp2(r),s=r3(!_)):M<16070?(f=vM(r),t=De2(f),r=delete t[f],f=QN(r)):(f=My2(u),t=uP2(f),t=nJ(f>>>=t),_=Js2(r),s=Vi1(!_)):M<16325?M<16197?(_=ch(f),t=Dk2(void 0),_=ja(f),t=HW2(void 0)):(f=c91(r),t=Us(r),f=m11(t=t81(f=t<f)),t=cu(r),r=Ah1(f&=t)):(f=rF2(r),t=bO(s),_=jU2(f>>=t),f=L_1(),t=db2(),Zg(f=t!=f));else 
if(M<16751)if(M<16623)r=(M<16390?(f=fe1(r),t=Di2(s),_=v6(f>>=t),f=Cp2(t),t=Fa2(f),f=t===f,Hu1):(f=LT2(t),t=pl1(r),null!=(r=Pz2(s)).variablePool?n(r,t,f):r[t]=f,f=ey2(r),f=Ib1(t),t=Bw1(f),f/=t,nN))(f);else{if(!(M<16624))throw f=yS2(t);f=cD(t),t=FE2(f),s=oV2(f=t==f),f=pu1(t),t=K92(f),f=Au(f=t-f)}else M<16816?M<16752?(f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=m8(f),f=Ur2(r),t=XI(r),t=Iz(f=t<f)):(f=_o1(r),t=$P2(f),r=delete t[f],f=iZ(r),_=kI(r),s=$E(!_)):(f=IQ2(_),t=e[i++],f?(i+=t,v=Ha(f)):w=10,f=Y31(_),t=rP1(u),v=CZ2(f*=t));else if(M<17482)if(M<17057)M<16987?M<16910?M<16896?(f=QV2(t),f=wQ2(u),t=P42(v),d=dK2(f|=t)):(f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=L(f),f=fb1(t),t=z$2(f),f=_6(f+=t)):M<16911?(f=GI2(t),t=Wi1(f),s=P$(f=t==f),_=ws1(r),f=X1(~_)):(f=B(r),t=wZ(s),_=Tw2(f%=t),t=Nc2(h)):M<17054?M<16988?(f=vL2(r),r=e[i++],--f[constantPool[r]],f=KL(r),t=ua1(s),_=lw2(f%=t)):(f=Wp(t),t=xE(f),f=$V(f=t-f),f=ty2(t),t=R7(f),s=rP(f=t==f)):(f=Kj2(r),t=e[i++],f?w=10:(i+=t,s=ZW2(f)),f=kD(t));else if(M<17172)if(M<17105)f=(M<17058?(f=OU(r),t=S02(r),t=y31(f=t<f),f=My(r),t=qu2(f),f=f<t,Mn):(f=xL2(r),t=qd(s),f=H12(_=ct2(f>>=t)),t=mP1(u),f=t!==f,bw1))(f);else if(M<17106){for(d=c.CFf,f=0;f<d;f++){let c=v=l22(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return 
l}}}f=Mp(s),t=XC2(_),u=p91(f-=t)}else{t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=ih(r),t=q12(r),t=cu2(f=t<f)}else M<17448?M<17173?(f=rL2(u),t=e[i++],f?w=10:i+=t,f=g_(t),t=u2(f),r=Mi1(f/=t)):(f=it(s),t=hs1(_),u=yo2(f-=t),f=a1(),t=ef1(),hn2(f=t!=f)):(_=KE(r),s=C42(!_),f=wc2(_),t=rz2(u),f=Ih1(f=t!==f));else if(M<18377)if(M<17938)if(M<17891)if(M<17483){for(f=yA2(),t=Zd1(),q42(f=t!=f),z.for_in_xh_cbb_list=p,f=e[i++],r=C8(_),g=[],t=0;t<f;t++)g.splice(0,0,WL2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=za1(s)}else f=X8(r),(t=pj2(f)).push(f),f=Je(t),f=sb1(_),t=R11(u),f=S_1(f=t!==f);else d=(M<17892?(f=bc1(s),t=Ti2(_),f=_22(u=dP2(f-=t)),t=m32(v),f|=t,w82):(f=su(r),t=Vj2(t),f=_f2(f<<=t),f=KT2(u),t=BK2(v),f|=t,_v1))(f);else if(M<18135)if(M<17939){t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=ZM(s),t=g21(_),u=Lp(f-=t)}else f=aZ(s),t=Vc2(f),t=jy1(f=t<=f),f=gQ2(l=z);else f=Nq2(_),t=SM(w),u=WZ2(f=t<f),f=SV(t),t=gc2(f),f=Th1(f+=t);else 
M<18631?M<18564?M<18378?(f=CW2(),t=vN2(),Xy1(f=t!=f),f=hH2(t),t=av1(f),s=mx2(f=t==f)):(f=Y72(t),t=eM(r),r=VQ(f&=t)):f=M<18565?(f=e[i++],i+=f,f=w91(_),t=o22(u),ib2(f=t!==f)):(f=P7(u),t=ne(f),t=_42(f>>>=t),pW({})):M<18695?M<18632?(f=Nr(_),t=Q62(u),f=ME2(f=t!==f),f=e[i++],i+=f):(f=lc2(t),t=n_(s),v=Fy1(f=f instanceof t),f=U_2(s),t=Oo2(f),t=c8(f=t<=f)):(f=W$2(r),t=Zo2(s),_=fS2(f>>=t),_=Fx(t),t=f1(-_));else if(M<22695)if(M<20659)if(M<19853)if(M<19109)M<18922?M<18852?M<18758?(f=cy(r),t=rC2(s),f=jO2(_=co(f^=t)),t=Qa1(u),f=aT(f=t!==f)):(f=NX(r),t=it1(r),t=Jq2(f=t<f),f=Av2(_),t=e[i++],f?(i+=t,v=x_1(f)):w=10):f=(M<18853?(f=V_2(t),t=Ue(f),r=Iu(f=t===f),f=NF(_),t=X5(u),f=t!==f,he2):(f=_x2(r),t=Lx2(f),f=x$(f=f<t),f=xn1(t),t=iX(f),f=t-f,H6))(f):M<19054?M<18923?(f=F42({}),f=e[i++],t=pM(constantPool[f])):(f=k81(r),t=zs1(s),_=hm2(f>>=t),f=XN2(z),t=h82(f),_=Cm2(f=f<=t)):M<19055?(f=Zw1(t),t=ZT2(r),r=L72(f&=t),f=_l({})):(f=we(r),t=Tl2(t),f=lL2(f<<=t),f=Q81(r),t=b0(r),t=oH(f=t<f));else if(M<19640)if(M<19398)M<19110?(_=GK(f),t=ld1(void 0),f=jE(r),t=PN(t),f=y_1(f<<=t)):(f=tw(r),t=RG(f),f=Xp(f=f in t),_=QR2(f),t=gs1(void 0));else{if(!(M<19399))return f=aG2(s),t=p71(_),void(u=dZ2(f-=t));f=Jx2(_),t=KA(u),v=FQ2(f*=t),f=LD(s),t=_g(_),u=_k(f-=t)}else M<19825?M<19641?(f=lS(u),t=Bs2(f),t=f51(f>>>=t),f=dO(r),t=QD(r),t=zB2(f=t<f)):(f=Rh(r),t=w0(f),f=Tm(f=f in t),f=vu1(t),t=Eg(f),s=zP(f=t==f)):(f=W91(s),t=pG2(f),t=cP(f=t<=f),_=Bz2(r),s=eA2(!_));else M<20376?M<20246?M<19919?M<19854?(f=dd(l=z),f=yx2(u),t=F5(v),d=MS(f|=t)):(_=Yg2(r),s=mI2(!_),f=Wm(_),t=s7(u),v=J41(f*=t)):M<19920?(t=go1([]),f=DR2(t),t=AK(f),f=f_(f+=t)):(f=Z31(r),t=GG2(s),_=Ly2(f>>=t),f=mB(r),t=ws(s),_=BO2(f%=t)):M<20362?M<20247?(f=ML2(s),t=Ob1(_),u=fu2(f-=t),_=r51(r),s=M51(!_)):(f=pg(r),t=b_(s),null!=(r=Cx2(_)).variablePool?n(r,t,f):r[t]=f,t=Ad(h)):(f=N62(s),t=c62(_),f=du2(u=Un(f-=t)),t=e[i++],f?w=10:i+=t):M<20593?M<20427?M<20377?(f=Ea2(_),t=tn1(u),f=Ti1(f=t!==f),f=e[i++],i+=f):(f=Px(r),t=yb(s),_=do2(f^=t),f=UR2(r),t=oJ2(f),r=delete 
t[f],f=zd(r)):M<20428?(f=p12(t),t=HD(f),s=Az2(f=t==f),f=x$2(r),t=LY(t),f=c51(f<<=t)):(f=h81(u),t=UU2(f),f=XU(t=oy1(f>>>=t)),t=sx(s),v=Xc1(f=f instanceof t)):M<20640?M<20594?(t=Fu2([]),y=ur2(f),null!=(P=is(t)).variablePool?n(P,y,v):P[y]=f):(f=FS(_),t=xe1(w),u=Ii2(f=t<f),t=jv1(h)):(f=lW2(r),t=P2(t),f=$q(f<<=t),t=MW2(h));else if(M<21701)if(M<21105)M<21081?M<20757?M<20660?(f=e[i++],t=an2(f),f=QA2(r),t=Z9(f),f=py1(f=f<t)):(f=LG2(t),t=xb(f),f=en(f=t-f),f=bI2(_),t=pW2(u),v=$62(f*=t)):M<20758?(f=L22(r),t=Ap(r),t=Rz(f=t<f),_=Ju1(r),s=BY2(!_)):(f=aL2(t),t=XN(r),f=tK(r=AQ2(f&=t)),t=a12(f),r=delete t[f],f=ob(r)):M<21103?t=(M<21082?(f=Il1(u),t=gO2(f),f>>>=t,O32):(function(){throw f=WB(t)}(),f=hk(r),t=U7(r),f=t<f,M71))(f):(_=kf2(r),s=CY2(!_),f=qH(r),r=e[i++],--f[constantPool[r]]);else if(M<21585)if(M<21418){if(!(M<21106))return f=aP1(_),t=GB(u),void(f=df2(f=t!==f));f=mg(r),t=e[i++],f?w=10:(i+=t,s=dn(f)),f=jl1(t),t=tF(f),r=VG2(f=t===f)}else if(M<21419){for(f=c.CFf,t=0;t<f;t++)w=e51(t),z[w]=o[t];f=q4(_),t=kh1(w),u=_3(f=t<f)}else f=xW2(t),t=_01(f),f=ZP(r=KO2(f/=t)),t=e[i++],f?w=10:(i+=t,s=eJ(f));else M<21653?M<21586?(f=kU(t),t=OL2(r),null!=(r=Es2(s)).variablePool?n(r,t,f):r[t]=f,f=ts(r),f=r81(t),t=n0(f),f=JM2(f+=t)):(f=fF2(r),t=yQ2(r),t=Oi1(f=t<f),t=bM(h)):(f=Fz(t),f=ud2(u),t=SH2(f),t=kP2(f>>>=t));else if(M<22101)if(M<21934)if(M<21798)M<21702?(f=rc1(),t=fz(),_a(f=t!=f),f=fh1(u),t=sm(v),d=b_2(f|=t)):(f=j41(r),t=fB(r),t=zs2(f=t<f),t=i61([]));else if(M<21799)f=ug2(s),t=Uu1(_),u=R8(f-=t),f=Du1(_),t=QF2(u),v=Gy1(f*=t);else{f=ou(_),t=QR(u),f=Ul1(f=t!==f),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return 
u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else if(M<22061)if(M<21935)_=kJ2(f),t=X51(void 0),f=Gm(r),t=AL(s),_=po(f%=t);else{{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,ck2()):s.splice(0,0,yc());for(f=u12(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}t=a81([])}else f=Pu2(t),t=hj2(r),null!=(r=ms1(s)).variablePool?n(r,t,f):r[t]=f,f=oC(r),f=_72(r),t=FI(t),f=yw2(f<<=t);else if(M<22512)M<22433?M<22102?(f=RT(w),r=e[i++],f[constantPool[r]]+=1,f=bM2(t),t=Os2(f),r=z41(f=t===f)):(f=Lb2(_),t=l4(u),f=kg2(f=t!==f)):M<22434?(_=YU2(r),s=fa1(!_),f=mA(l=z)):(f=sY(r),t=my(s),_=wy2(f>>=t),f=Mq2(r),t=Tz2(s),_=JD2(f>>=t));else if(M<22670)if(M<22513)f=Hy(r),t=U62(s),_=aP(f^=t);else{f=Fx2(r),t=ef(t),f<<=t,f=aA(f);{f=qV2(t),t=Nh2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=Tb(f)}}else f=EZ(u),t=Bo2(f),t=LR(f>>>=t),f=Em2(r),t=H91(s),_=pK(f>>=t);else if(M<24157)if(M<23185)if(M<22932)if(M<22837)if(M<22747)if(M<22696)f=Bt2(t),r=e[i++],f?i+=r:w=9,f=Mt2(t),t=Cf(f),f=ZH2(f+=t);else{f=OX2(t),t=mt(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=E_(f),f=ff2(r),t=bp(s),_=Hr(f^=t)}else M<22748?(f=UN2(r),t=fr(s),_=Fp2(f^=t),f=eI(t),t=N51(f),f=K31(f+=t)):(f=Y3(z),t=ei(f),_=aT2(f=f<=t),t=fY2([]));else if(M<22888)if(M<22838)f=ri(u),t=gp2(v),d=$51(f|=t),f=Da2(s),t=Bk(_),u=U6(f-=t);else{f=fN(z),t=ZT(f),_=Yk2(f=f<=t),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else M<22889&&(f=cV(t),t=a22(s),v=In1(f=f instanceof t),f=aq2(_),t=G1(w),u=Fr(f=t<f));else 
if(M<23163)if(M<23098)M<22933?(f=jr(t),t=wh1(f),f=WM2(r=Y61(f/=t)),t=y21(s),_=xc1(f^=t)):(f=v51(r),t=gl(s),_=_J2(f%=t),f=MA2(r),t=u31(f),f=ac1(f=f in t));else if(M<23099){f=j_2(t),t=ms(f),f+=t,f=r2(f);{f=gK(t),t=Xw1(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=Uy1(f)}}else f=XI2(t),t=I12(s),v=ku2(f=f instanceof t),f=VB2(u),t=o62(f),t=_y2(f>>>=t);else if(M<23179)if(M<23164){f=nY(t),t=t8(r),f&=t,r=Vw(f);throw f=oU2(t)}else{{for(z.for_in_xh_cbb_list=p,f=e[i++],r=La1(_),g=[],t=0;t<f;t++)g.splice(0,0,Ev2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=oN2(s)}f=N61(_),t=Bd2(w),f=t<f,u=k_1(f)}else f=H1(t),t=O5(f),s=$F(f=t==f),f=xw2(t),t=Cw1(r),r=$R2(f&=t);else if(M<23671)if(M<23478)if(M<23364)if(M<23186)f=e[i++],t=KM(constantPool[f]),t=r41(h);else{for(f=c.CFf,t=0;t<f;t++)w=_V(t),z[w]=o[t];f=ey(t),t=WV2(f),f=ws2(f=t-f)}else M<23365?(f=me2(s),t=dM(_),f=Un1(u=n92(f-=t)),t=OY2(v),d=s92(f|=t)):(f=Ev(t),t=i21(f),f=Kh(s=FU2(f=t==f)),t=bB2(f),t=p2(f=t<=f));else M<23583?t=M<23479?(f=kN(u),t=e[i++],f?w=10:i+=t,_=Jr2(f),t52(void 0)):(f=tH2(u),t=sI(f),ec2(f>>>=t)):(_=i$(f),t=ja1(void 0),f=FE(_),t=al2(w),u=Ei1(f=t<f));else M<24039?M<23903?M<23672?(f=ed(t),f=QZ(r),t=e[i++],f?w=10:(i+=t,s=zj(f))):(_=g32(f),t=Ka2(void 0),f=yP2(),t=Dn1(),lf1(f=t!=f)):M<23904?(f=Eo(t),t=Ih(s),v=y12(f=f instanceof t)):(f=s52(u),t=vT2(f),t=qj(f>>>=t)):M<24153?t=M<24040?(f=_31(z),t=fv1(f),_=$81(f=f<=t),f=fs(u),t=ev(f),NL(f>>>=t)):(f=ov2(t),t=lk2(r),r=HO(f&=t),j_1(-(_=pm(t)))):(f=cc1(r),t=k8(s),_=dI(f%=t),f=e[i++],i+=f);else M<24678?M<24457?M<24361?M<24181?M<24158?(t=qd2([]),_=QV(r),f=Gw(typeof _)):(f=xN(r),t=qi(s),null!=(r=eZ2(_)).variablePool?n(r,t,f):r[t]=f):M<24182?(f=Ub1(_),t=_52(u),v=no(f*=t),f=rY(_),t=Y0(u),v=Kw1(f*=t)):(f=aa2(r),t=CN2(t),f=iI2(f<<=t)):f=M<24453?M<24362?(f=$n1(t),t=G01(f),r=Ay2(f/=t),f=l41(t),t=zw(r),null!=(r=Fz2(s)).variablePool?n(r,t,f):r[t]=f,nH2(r)):(_=Wy1(r),f=zP2(typeof 
_),f=Ib(r),t=yY(t),bz2(f<<=t)):(f=SU2(w),r=e[i++],f[constantPool[r]]+=1,Pm2({})):M<24642?M<24544?M<24458?(f=G3(t),t=Gn(r),r=uf2(f&=t),f=Im(u),t=E22(v),d=Q6(f|=t)):(f=nH(_),t=n22(u),f=Vu2(f=t!==f),f=e[i++],t=Lr(f)):M<24545?(_=GL(r),s=J81(!_),t=ND2(h)):(f=Z$2(t),t=Q_2(f),s=$t2(f=t==f),f=Rn(t),t=ls2(f),r=MA(f/=t)):M<24673?M<24643?(f=$u1(t),t=aD2(f),r=ge(f/=t),f=cx(t),r=e[i++],f?i+=r:w=9):(f=lr1(_),t=tP2(w),f=Yc1(u=xF2(f=t<f)),t=LR2(v),d=z_2(f|=t)):(f=XH2(r),t=Pg(s),_=AV(f>>=t),f=SI(u),t=qe1(f),t=uB2(f>>>=t)):M<25016?M<24933?M<24710?M<24679?(f=e[i++],t=Ca2(f),f=tv2(t),t=Ku(f),s=e41(f=t==f)):(f=Di(t),t=Ee(f),f=YO2(f+=t),f=Ou(r),r=e[i++],--f[constantPool[r]]):M<24711?(f=kr1(r),t=Yu2(s),_=Y8(f^=t),f=o31(r),t=cK2(s),null!=(r=SV2(_)).variablePool?n(r,t,f):r[t]=f):(f=Ge(t),t=RU(f),r=im(f=t===f),f=js1(t),t=Ls1(f),f=g_1(f=t-f)):M<25008?M<24934?(f=y32(_),t=kc1(u),f=vc1(f=t!==f),f=Vh(r),t=h31(r),t=Zv1(f=t<f)):(f=g91(r),t=ZV2(s),_=Ij(f>>=t),f=K_(r),(t=cQ(f)).push(f),f=ND(t)):(_=Av(r),s=ap(!_),f=e72(r),t=PF2(r),t=jd1(f=t<f)):M<25128?M<25072?M<25017?(f=HF2(z),t=xP2(f),_=ry1(f=f<=t),_=Aq2(f),t=dj2(void 0)):(f=TM2(r),t=by2(s),_=gX(f>>=t),f=bK(t),t=SN(r),null!=(r=gG2(s)).variablePool?n(r,t,f):r[t]=f,f=YC(r)):M<25073?(f=F3(r),t=e[i++],f?w=10:(i+=t,s=fK(f)),f=mg2(z),t=JN2(f),_=NW2(f=f<=t)):(f=VK(u),t=bz(f),t=IL(f>>>=t),f=Gj(r),t=qy1(f),f=IQ(f=f in t)):M<25219?M<25129?(f=Td2(t),t=Y11(f),f=fL(f+=t),f=a31(_),t=e[i++],f?(i+=t,v=y1(f)):w=10):(_=bu(t),t=Z92(-_),f=Kz(z),t=se2(f),_=Nr2(f=f<=t)):(f=uv2(_),t=$h2(w),u=LP2(f=t<f),f=vk2(t),r=e[i++],f?i+=r:w=9);else if(M<37242)if(M<31499)if(M<28478)if(M<26873)if(M<26170)if(M<25728)if(M<25541)if(M<25296){if(!(M<25248))return f=OC2(r),t=q_2(s),void(_=zS(f^=t));for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,ku()):s.splice(0,0,uy());for(f=VT(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}f=Qp(t)}else M<25297?(f=i82(r),t=je1(f),r=delete t[f],f=jL(r),f=T3(t),t=l02(f),r=gB(f=t===f)):(_=hz(r),f=U9(typeof 
_),f=ee2(u),t=o61(v),d=FC(f|=t));else M<25655?M<25542?(f=qP2(t),t=KQ2(f),r=Av1(f/=t),_=sV(f),t=to(void 0)):(f=ak2(r),t=e[i++],f?w=10:(i+=t,s=qD(f)),f=cB(),t=o32(),y4(f=t!=f)):v=(M<25656?(f=Bu2(_),t=Xn2(r),null!=(r=x92(f)).variablePool?n(r,t,f):r[t]=f,f=N22(_),t=J3(u),f*=t,YA):(f=iW(t),t=Gk2(s),v=ln(f=f instanceof t),f=Y82(_),t=v8(u),f*=t,qA2))(f);else if(M<25979)M<25841?M<25729?(f=xr(t),t=zk2(r),r=xS(f&=t),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=AT(f)):(f=e[i++],t=ff1(r),r=RG2(f),t[constantPool[f]]=r,f=u22(_),t=ZU(w),u=ST(f=t<f)):M<25842?(_=_i(r),f=zy2(~_),f=Yu(t),t=mQ(f),f=_g2(f=t-f)):(f=A91(),t=v11(),Y01(f=t!=f),f=WI2(s),t=Br2(f),t=mR(f=t<=f));else if(M<26088)if(M<25980)f=nc({}),f=Tx2(t),r=e[i++],f?i+=r:w=9;else{f=Fe1(_),t=E32(u),f=t!==f,f=$3(f);{for(z.for_in_xh_cbb_list=p,f=e[i++],r=R91(_),g=[],t=0;t<f;t++)g.splice(0,0,C32(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=SC(s)}}else f=Qk(s),t=xk2(f),f=$s(t=EB(f=t<=f)),t=ca2(s),v=hw1(f=f instanceof t);else if(M<26516)if(M<26420)M<26210?M<26171?(f=hn1(t),t=Re2(f),r=lD2(f/=t),f=x82(),t=F(),Li(f=t!=f)):(f=vC(_),t=qG2(u),v=kk(f*=t),f=eM2(s),t=Bo(_),u=Jz(f-=t)):f=M<26211?(f=eu2(t),t=xW(f),f=_R(r=ka(f=t===f)),t=Gd1(f),Lq2(f=f in t)):(f=fp2(u),t=DP1(v),d=o6(f|=t),SF2({}));else if(M<26472)if(M<26421)f=DY(u),t=e[i++],f?w=10:i+=t,f=$g2(r),t=o8(t),f=SA2(f<<=t);else for(f=Xf1({}),f=c.CFf,t=0;t<f;t++)w=Db2(t),z[w]=o[t];else for(f=EG2(z),t=ae2(f),_=yN2(f=f<=t),d=c.CFf,f=0;f<d;f++){let c=v=EJ(f);l[c]=function(){var b=new 
cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else if(M<26739)if(M<26577)if(M<26517)f=Td1(r),t=Pf1(s),_=Yw1(f%=t),f=WY2(t);else{for(f=c.CFf,t=0;t<f;t++)w=W71(t),z[w]=o[t];f=Nz(s),t=oD2(_),u=aj2(f-=t)}else M<26578?(f=jV(t),t=A9(f),r=KA2(f=t===f),f=e[i++],t=p61(f)):(f=e[i++],t=A_1(constantPool[f]),f=vb(r),t=OD(f),f=GF2(f=f<t));else if(M<26858)if(M<26740)f=je2(r),t=vx(s),_=Xp2(f>>=t),f=Q4(t),t=Hh2(f),f=GQ(f=t-f);else for(f=H51(r),t=eb(f),f=ih1(f=f<t),d=c.CFf,f=0;f<d;f++){let c=v=Ee2(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else f=Bu1(r),t=lW(s),_=xK(f^=t),f=pQ2(t),t=Cn2(f),f=cP1(f=t-f);else if(M<27557)if(M<27370)if(M<27170)if(M<26972)M<26874?(f=qo2(_),t=F$(w),u=gh(f=t<f),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=WI(f)):(f=Ek(t),t=Yh2(f),f=Ol2(f+=t),f=lq(t),t=o51(f),r=l$(f/=t));else 
if(M<26973)f=jj(r),t=dj(s),_=m2(f^=t),t=zn2([]);else{for(f=z62(s),t=a62(f),t=HR2(f=t<=f),f=e[i++],r=tS2(_),g=[],t=0;t<f;t++)g.splice(0,0,dG2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=j42(s)}else if(M<27244)if(M<27171){for(f in p=[],t=xX(t))p.push(f);z.for_in_xh_cbb_list=p,f=Hs1(r),t=Yo(s),_=Ar(f>>=t)}else f=tO2(_),t=Gd(w),u=TX(f=t<f),t=ma2(h);else M<27245?(f=f21(r),t=Gp(s),_=q9(f^=t),t=wY(h)):(f=mi1(t),f=Fw(r),t=Jf(f),r=delete t[f],f=f81(r));else if(M<27433)if(M<27391)if(M<27371){for(f=yk2(r),t=RQ2(s),_=o7(f%=t),f=e[i++],r=M4(_),g=[],t=0;t<f;t++)g.splice(0,0,_b(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=dJ(s)}else{f=A5(t),t=Lw1(f),f=BZ2(f=t-f),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else if(M<27392){f=T9(s),t=ju1(_),f-=t,u=Gu2(f);{for(f in p=[],t=OH2(t))p.push(f);z.for_in_xh_cbb_list=p}}else f=A0(t),t=qc2(f),r=Y6(f=t===f),f=Tw(t),t=BT2(f),f=SP(f+=t);else if(M<27556){if(!(M<27434))return f=tf2(),t=aP2(),void w$2(f=t!=f);f=Nw1(r),t=m3(s),_=t31(f%=t),f=bJ(r),r=e[i++],--f[constantPool[r]]}else f=gc(s),t=D91(f),t=Fv1(f=t<=f),f=z6(z),t=_a1(f),_=OV(f=f<=t);else 
if(M<28050)if(M<27863)M<27712?M<27558?(f=E31(t),t=ko1(f),f=J(f=t-f),f=E$2(_),t=$d(r),null!=(r=p41(f)).variablePool?n(r,t,f):r[t]=f):(f=Pr2(t),t=$71(f),f=xy2(f+=t),f=s21(t),t=U01(f),r=Xe2(f=t===f)):M<27713?(f=NR(t),t=YN2(f),f=H5(f=t-f),f=n31(t),t=fl2(f),f=S72(f=t-f)):(f=Rm(r),t=Ci1(s),f=Nq(_=JX2(f^=t)),t=H_(r),null!=(r=XK(f)).variablePool?n(r,t,f):r[t]=f);else if(M<27998)if(M<27864)for(f=aC2(_),t=t32(w),u=R3(f=t<f),f=c.CFf,t=0;t<f;t++)w=N02(t),z[w]=o[t];else f=hR(s),t=k3(_),u=OY(f-=t),t=_m2([]);else f=gL(r),t=gx(r),t=ZJ2(f=t<f),f=SG2(r),t=e[i++],f?w=10:(i+=t,s=LD2(f));else if(M<28307)M<28196?M<28051?(f=Vn2(r),t=xp2(s),f=Ke(_=WY(f%=t)),t=cm(r),null!=(r=$v1(f)).variablePool?n(r,t,f):r[t]=f):(f=Gq(t),t=U1(f),f=Ga2(f+=t),f=N81(r),t=AZ2(s),_=tT(f%=t)):M<28197?(t=DI2([]),f=e[i++],t=Q92(r),r=k71(f),t[constantPool[f]]=r):(t=nb1([]),f=uI2(r),t=x1(s),_=UM2(f>>=t));else if(M<28380)M<28308?(f=xF(r),t=OP2(s),_=YA2(f^=t),f=y7(),t=hc1(),wb1(f=t!=f)):(f=nA(r),t=Zr2(s),_=Tj(f%=t),f=tl2(r),t=BW2(f),f=vf2(f=f<t));else{throw f=Gy2(t);f=$52(t),t=uq(f),f=t===f,r=TG2(f)}else if(M<30238)if(M<29182)if(M<28890)if(M<28655)if(M<28567)M<28479?(f=wg2(u),t=Wl1(f),t=W82(f>>>=t),_=cJ(r),f=jw(~_)):(f=xE2(_),t=Dt(u),f=f7(f=t!==f),t=Cc1(h));else if(M<28568)t=DR(h),f=em2(r),t=eO(t),f=ho1(f<<=t);else{t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=Er(t),t=p0(f),f=Cw2(f=t-f)}else M<28741?M<28656?(f=bL(r),t=AW(s),_=bN(f^=t),_=kq(r),f=CX(typeof 
_)):(f=tg(_),t=ZD(u),v=uY2(f*=t),f=k22(_),t=BJ2(u),v=fv(f*=t)):M<28742?(f=D61(r),t=IG2(r),t=or2(f=t<f),f=lX2(r),t=e[i++],f?w=10:(i+=t,s=L01(f))):(f=uN(u),t=aU2(v),d=jH(f|=t),f=sU2(_),t=uY(u),v=aI(f*=t));else if(M<29064)if(M<29034)if(M<28891){throw f=HP(t);f=hQ2(r),t=kH2(r),f=t<f,t=CB(f)}else{f=Fy({});throw f=HE(t)}else M<29035?(f=HF(_),t=_Z(w),u=p7(f=t<f)):(f=Zr1(r),t=qz(f),r=delete t[f],f=F_2(r));else if(M<29172)if(M<29065){throw f=U81(t);f=xh2(),t=Z61(),f=t!=f,m42(f)}else f=$B2(r),t=x71(s),_=iN(f>>=t),f=Vs(t),t=Mu(f),r=Hg(f/=t);else f=yC2(_),t=Ze2(u),f=oB2(f=t!==f),f=VS2({});else if(M<29583)if(M<29362)if(M<29223)if(M<29183)f=mY(r),t=gd(s),_=AR2(f%=t),f=qq(r),t=Yj2(s),_=kb(f^=t);else{f=s82(s),t=qK(_),f-=t,u=lz(f);{for(f in p=[],t=dC(t))p.push(f);z.for_in_xh_cbb_list=p}}else M<29224?(f=c92(s),t=x3(_),u=xr1(f-=t),f=Ns2(t)):(f=e[i++],t=ca1(r),r=xe(f),t[constantPool[f]]=r,f=Wc2(t),t=Xx(f),r=ob1(f/=t));else if(M<29519)if(M<29363){f=MQ(_),t=qu(w),f=t<f,u=Co2(f);{for(f=e[i++],r=Yf1(_),g=[],t=0;t<f;t++)g.splice(0,0,nw(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=Al1(s)}}else{{f=de1(t),t=dr1(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=Mc2(f)}f=jm(u),t=sh1(v),f|=t,d=K22(f)}else f=eA(r),t=wJ(t),f=Iy1(f<<=t),f=Pa1(t),t=L31(f),r=PH(f/=t);else if(M<29908)if(M<29607)if(M<29584)f=zV2(r),t=Xr(t),f=$p(f<<=t),_=$M2(r),f=hy(typeof _);else{{for(f=e[i++],r=Cd1(_),g=[],t=0;t<f;t++)g.splice(0,0,Vn1(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=yf(s)}f=V$2(u),t=KB(v),f|=t,d=qx(f)}else M<29608?(f=B2(r),t=h01(s),_=L21(f%=t),f=ZK2(r),t=MG2(f),f=Y02(f=f in 
t)):(f=oT2(t),t=LV2(f),r=Q22(f/=t),y=zy1(f),null!=(P=Kr2(t)).variablePool?n(P,y,v):P[y]=f);else if(M<30182)if(M<29909)f=fQ2(t),f=e[i++],i+=f;else{{f=j0(t),t=MS2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=z1(f)}f=k(t)}else f=qH2(r),t=dl2(s),_=$b(f%=t),_=zJ2(f),t=mt1(void 0);else if(M<30808)M<30506?M<30382?M<30309?M<30239?(f=t61(t),f=Ic2(t),t=EK(f),r=Hy1(f=t===f)):(f=eF2(t),t=j31(f),r=X62(f/=t),f=EQ(z),t=Am(f),_=dm(f=f<=t)):M<30310?(f=CD(u),t=hh(v),d=E6(f|=t),f=jO(_),t=GO2(w),u=jM2(f=t<f)):(f=s1(_),t=UC(r),null!=(r=f6(f)).variablePool?n(r,t,f):r[t]=f,f=sF2(s),t=Bb2(f),t=bW(f=t<=f)):M<30459?M<30383?(_=Rw(t),t=Vq(-_),f=dH({})):(f=Ca(_),t=G71(u),v=IE(f*=t),f=D9(t),t=zn1(f),r=kT(f=t===f)):(f=M42(s),t=UV(f),t=HO2(f=t<=f),_=qy(f),t=Pt(void 0)):M<30711?M<30582?M<30507?(f=cf2(l=z),f=Hi1(r),t=VB(s),_=Jy1(f>>=t)):(f=W01(z),t=X02(f),_=dC2(f=f<=t),f=P21(u),t=U$2(f),t=We(f>>>=t)):M<30583?(t=ii1([]),f=bO2(t),t=v5(f),f=$Q2(f+=t)):(f=c9(u),t=bm(v),d=BP2(f|=t),t=GS2(h)):M<30744?M<30712?(t=oo([]),t=ci2(h)):(_=KJ2(r),s=qY(!_),f=_Y(t),t=RV(f),f=wW2(f=t-f)):(f=Ia1(s),t=oW(f),t=iD2(f=t<=f),f=Vh1(),t=h12(),fn(f=t!=f));else if(M<31340)if(M<31296)if(M<31162)M<30809?(y=Hu2(f),null!=(P=W31(t)).variablePool?n(P,y,v):P[y]=f,f=Yq2(_),t=nt1(w),u=nX(f=t<f)):(f=K71(t),t=ml1(f),f=gx2(f=t-f),f=V01(r),t=in1(f),f=lQ2(f=f<t));else if(M<31163){for(f=e[i++],r=wx2(_),g=[],t=0;t<f;t++)g.splice(0,0,IY(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=x4(s),f=l_1(t),t=Sr(f),r=QP1(f=t===f)}else f=hn(_),t=N3(u),f=gJ(f=t!==f),f=YV(r),t=mO2(s),null!=(r=B82(_)).variablePool?n(r,t,f):r[t]=f;else M<31335?t=(M<31297?(f=JF2(r),t=Yt1(f),f=TB(f=f in t),f=s3(u),t=nl1(f),f>>>=t,TD2):(f=sa2(u),t=R81(v),d=Md2(f|=t),f=P92(s),t=T62(f),f=t<=f,L7))(f):(f=Ow1(_),t=d8(w),u=vK2(f=t<f));else 
if(M<31442)if(M<31423)if(M<31341){f=eu1(z),t=a72(f),_=So(f=f<=t),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else y=t22(f),null!=(P=QT(t)).variablePool?n(P,y,v):P[y]=f,f=v01(t),t=xG2(r),r=hu1(f&=t);else M<31424?(f=tO(s),t=W_1(f),t=Nl1(f=t<=f),f=Xq(r),r=e[i++],--f[constantPool[r]]):(f=FB(r),t=Cd2(r),t=Jv(f=t<f),f=Bt1(w),r=e[i++],f[constantPool[r]]+=1);else M<31496?f=(M<31443?(f=Ky(r),t=BC(s),_=fe2(f^=t),f=g6(r),t=Eb1(f),f=f in t,Lz2):(f=pN({}),f=$G2(_),t=AO2(u),f=t!==f,Xf))(f):(f=h21(s),t=sf(_),u=FN(f-=t),f=jP1(r),r=e[i++],--f[constantPool[r]]);else if(M<34458)if(M<33043)if(M<32275)if(M<31824)if(M<31704)M<31664?M<31500?(_=AG(t),t=ls(-_),f=zU(u),t=tC2(f),t=Dv2(f>>>=t)):(f=MP1(),t=oy2(),N21(f=t!=f),f=M$(r),t=TZ2(t),f=QD2(f<<=t)):M<31665?(f=$J2(t),f=UL(t),t=Gs2(f),s=O52(f=t==f)):(f=Fs1(r),t=mG(s),_=gV2(f%=t),_=HV2(f),t=Gw1(void 0));else if(M<31779)M<31705?(f=e[i++],t=AD(f),f=vc(_),t=IX2(u),f=Dm2(f=t!==f)):(y=DJ(f),null!=(P=H42(t)).variablePool?n(P,y,v):P[y]=f,f=Oa2(z),t=DM(f),_=yi1(f=f<=t));else if(M<31780)f=AP1(),t=KP1(),we1(f=t!=f),t=xo1(h);else{for(f=Xi(s),t=$D(f),t=vG(f=t<=f),f=e[i++],r=rv2(_),g=[],t=0;t<f;t++)g.splice(0,0,P22(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=dS(s)}else if(M<32063)if(M<31986)M<31825?(f=DV2(t),r=e[i++],f?i+=r:w=9,f=oD(r),t=Wh1(s),_=aX(f>>=t)):(f=QQ2(s),t=Dr(_),f=cu1(u=z32(f-=t)),t=R01(f),t=le(f>>>=t));else 
if(M<31987){f=T92(t),t=mq(f),r=PS(f/=t),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else f=w7(r),t=cs2(s),_=vo2(f>>=t),f=b9(t),t=J91(s),v=hb(f=f instanceof t);else M<32131?M<32064?(f=si(t),t=w42(f),f=NC(f+=t),f=$x2(r),t=Es(s),_=iY(f^=t)):(f=y0(t),t=C51(f),r=lB(f/=t)):(f=bZ2(t),t=nF2(r),r=yh2(f&=t),f=lv1(s),t=MP2(f),t=fq(f=t<=f));else if(M<32545)if(M<32498)if(M<32397)M<32276?(f=Vw1(r),t=e[i++],f?w=10:(i+=t,s=CE(f)),f=AC(t),t=Y21(f),s=I_(f=t==f)):(_=oS(r),s=jp2(!_),_=xQ2(t),t=n32(-_));else if(M<32398)f=Dl1(t),t=bl1(f),f=Ql2(f=t-f),f=r_1(_),t=Y41(u),f=JQ(f=t!==f);else{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,e9()):s.splice(0,0,ht1());for(f=qU(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}f=ss2(_),t=Or(w),u=Go(f=t<f)}else if(M<32528)if(M<32499)_=pC(f),t=Pt2(void 0),y=Vb(f),null!=(P=Fm2(t)).variablePool?n(P,y,v):P[y]=f;else for(f=XD2(r),t=js2(s),_=x_(f^=t),d=c.CFf,f=0;f<d;f++){let c=v=BT(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else 
f=Ra(u),t=e[i++],f?w=10:i+=t,f=dt(r),t=GY2(s),_=sT2(f^=t);else if(M<32736)if(M<32691)M<32546?(f=e[i++],t=i_(constantPool[f]),f=if2(z),t=og2(f),_=sq(f=f<=t)):(f=RB2(t),t=y_(r),r=D_(f&=t),t=Wu(h));else if(M<32692)f=ua2(r),t=EP1(r),t=U$(f=t<f),f=O$2(s),t=XP(f),t=kO2(f=t<=f);else{t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}t=E5(h)}else M<32881?M<32737?(f=_k2(_),t=Zw2(w),u=Be(f=t<f),f=X81(r),(t=qo1(f)).push(f),f=Lh2(t)):(f=Y92(s),t=fW(_),u=RQ(f-=t),y=QM(f),null!=(P=HZ(t)).variablePool?n(P,y,v):P[y]=f):(f=$z2(t),t=dm2(f),f=QA(f=t-f),t=Mn2([]));else if(M<33501)if(M<33352)if(M<33291)M<33141?M<33044?(f=Pi(r),t=IT2(s),_=GI(f^=t),f=e[i++],t=IM(r),r=Ir1(f),t[constantPool[f]]=r):(f=xy1(u),t=P51(v),d=vw1(f|=t),_=Hb(r),f=b8(typeof _)):s=(M<33142?(f=T$(r),t=Gi1(s),_=mb(f>>=t),f=J$2(t),t=bb2(f),f=t==f,gi):(f=XK2(r),t=Ji2(s),_=ba2(f%=t),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),qi1))(f);else if(M<33338)if(M<33292)f=re1(s),t=Q32(f),t=cn1(f=t<=f),f=DF2(_),t=zI2(w),u=XJ(f=t<f);else{f=mc2({}),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else{for(d=c.CFf,f=0;f<d;f++){let c=v=x11(f);l[c]=function(){var b=new 
cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}t=J21([])}else M<33426?M<33389?M<33353?(f=hi2(t),t=sv(r),null!=(r=nv2(s)).variablePool?n(r,t,f):r[t]=f,f=CO(r),f=Lu(t),t=r4(r),r=um(f&=t)):(f=Zq(_),t=Wp2(w),f=Er2(u=KP2(f=t<f)),t=TW2(f),t=H(f>>>=t)):M<33390?(f=Y_2(t),f=Ky2(u),t=sC(f),t=Kf2(f>>>=t)):(f=MO(),t=Mo1(),r72(f=t!=f),f=Jc2(l=z)):M<33482?M<33427?(f=wF2(r),t=s0(r),t=HG(f=t<f),f=pY(r),t=Pl2(t),f=MB2(f<<=t)):(f=r_2(t),_=Zu2(r),s=pT2(!_)):(f=Mb2(_),t=YP(u),f=w_1(f=t!==f),f=dp(r),r=e[i++],--f[constantPool[r]]);else if(M<34257)if(M<33970)if(M<33783)if(M<33502)_=Aj2(t),t=yO(-_),f=lR(r),t=R_1(s),_=JD(f>>=t);else for(f=qZ(s),t=H61(_),u=vR(f-=t),f=c.CFf,t=0;t<f;t++)w=me(t),z[w]=o[t];else r=(M<33784?(f=Ey2(_),t=Et2(u),f=Cj2(f=t!==f),f=VY2(t),t=Ic(f),f=t===f,M2):(f=Oi(s),t=sN2(f),t=sZ(f=wb2(t=AW2(f=t<=f))),f/=t,Kc1))(f);else M<34228?M<33971?(f=RY2(r),t=e[i++],f?w=10:(i+=t,s=Dy(f)),f=Py({})):(f=xA(t),t=IZ2(f),f=Ir2(r=vy2(f/=t)),t=tA2(r),t=lr(f=t<f)):(t=VT2([]),t=Zi2([]));else M<34397?M<34266?M<34258?(f=iy1(t),t=gT2(f),s=Tu1(f=t==f),f=e[i++],t=hu(f)):(f=In2(r),t=CP(t),f=PX2(f<<=t),f=KY2(t),t=U12(r),null!=(r=GC2(s)).variablePool?n(r,t,f):r[t]=f,f=Y_(r)):M<34267?(f=Qo(t),t=_2(f),s=dw2(f=t==f),t=rG2(h)):(f=e[i++],i+=f):M<34429?M<34398?(f=cW(_),t=EV2(w),f=eF(u=lx2(f=t<f)),t=zZ2(v),d=zI(f|=t)):(f=XD(t),t=Qr1(s),v=Vk2(f=f instanceof t),f=_N(r),t=Zz2(s),_=ya2(f>>=t)):(f=eV(z),t=hr2(f),_=lC2(f=f<=t),f=il1(u),t=EV(f),t=bh1(f>>>=t));else 
if(M<35890)if(M<35303)if(M<34962)if(M<34848)if(M<34707)if(M<34459){for(z.for_in_xh_cbb_list=p,f=e[i++],r=X12(_),g=[],t=0;t<f;t++)g.splice(0,0,sr2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=Db1(s),f=vO2(s),t=tP(f),t=c21(f=t<=f)}else _=Sy2(r),s=Kv(!_),y=Cv2(f),null!=(P=s02(t)).variablePool?n(P,y,v):P[y]=f;else M<34708?(f=lf(_),t=Jf1(u),f=hI2(f=t!==f),f=Py1(t),t=hy1(f),r=Xe1(f/=t)):(f=Mt(r),t=Sa(s),_=EY(f^=t),f=h91(r),t=$o(s),_=Lq(f%=t));else M<34933?M<34849?(f=z81({}),f=z3(s),t=Nx2(f),t=Z51(f=t<=f)):(f=hg(s),t=UZ2(_),u=SP2(f-=t),f=Pc1(r),t=my1(f),f=mB2(f=f in t)):M<34934?(f=Ye2(t),t=t01(f),r=hf2(f/=t),f=Fu1(l=z)):(f=Ol(r),t=Ye(t),f=zG2(f<<=t),f=mS2(s),t=Ck(f),t=Dh1(f=t<=f));else if(M<35112)if(M<35096)if(M<34963){for(f=c.CFf,t=0;t<f;t++)w=Id2(t),z[w]=o[t];t=Jg([])}else f=_L(r),t=od(f),f=Kt2(f=f<t),f=bV(r),t=D02(f),f=fs1(f=f<t);else M<35097?(f=_s2(t),t=hZ2(f),s=Xz2(f=t==f),f=L2(_),t=e[i++],f?(i+=t,v=Dj2(f)):w=10):(f=rj(r),r=e[i++],--f[constantPool[r]],f=Wi(t),t=CW(f),s=Dq(f=t==f));else if(M<35259)if(M<35113){for(f in p=[],t=E52(t))p.push(f);z.for_in_xh_cbb_list=p}else f=HG2(t),t=av2(r),f=op(r=K21(f&=t)),t=vL(f),f=w92(f=f<t);else f=wu2(t),t=$b1(f),r=Yd1(f/=t),f=jw1(t),t=d41(r),r=hC2(f&=t);else if(M<35669)if(M<35425)if(M<35375)M<35304?(f=Vg(r),r=e[i++],--f[constantPool[r]],f=Ro1(u),t=fc1(v),d=ZC(f|=t)):(f=F71(r),t=x52(s),_=hH(f%=t),f=j(r),t=TY2(t),f=qC(f<<=t));else if(M<35376){for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,a71()):s.splice(0,0,mC());for(f=oI(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}f=L61(r),t=aB2(s),_=ce2(f>>=t)}else f=ka1(),t=x91(),P91(f=t!=f);else M<35662?M<35426?(f=gy2(r),t=ED(s),_=Ho2(f%=t),f=zK2(t),t=eT2(f),f=$$(f=t-f)):(_=OM2(r),f=ue(typeof _),f=i72(),t=FA(),Hu(f=t!=f)):(_=Lm2(f),t=nx2(void 0),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=Be1(f));else 
if(M<35828)if(M<35794)M<35670?(f=w21(u),t=Z72(v),d=Ef2(f|=t),f=BU(r),t=QP(s),_=SL2(f^=t)):(f=mv(t),t=D_1(r),null!=(r=pE2(s)).variablePool?n(r,t,f):r[t]=f,f=O(r));else{if(!(M<35795))return f=Lv1(r),t=He(s),void(_=rP2(f^=t));f=Oc(t),t=Bz(f),s=tg2(f=t==f),f=vf(_),t=f$2(u),f=dy2(f=t!==f)}else M<35873?M<35829?(f=nK2(t),r=e[i++],f?i+=r:w=9,f=D92(r),t=ks(s),_=Fg(f%=t)):(f=WT(t),r=e[i++],f?i+=r:w=9,f=n12({})):(_=g7(f),t=GL2(void 0),_=uu1(r),s=h4(!_));else if(M<36422)if(M<36151)if(M<35985)if(M<35896)if(M<35891){for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,WJ2()):s.splice(0,0,Ec1());for(f=NT2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}t=Ne1([])}else f=d51(r),t=Mv1(s),null!=(r=tv1(_)).variablePool?n(r,t,f):r[t]=f,f=di(r),t=cY(s),_=g3(f%=t);else t=M<35897?(f=BV2(_),t=Is1(w),u=AS2(f=t<f),hV2([])):(f=Bn(s),t=sA(_),u=_F2(f-=t),Ia(h));else M<36092?M<35986?(f=ia(t),t=mL2(f),s=B1(f=t==f),t=ZR(h)):(f=_o(t),t=Fb1(f),r=rG(f=t===f),f=JL(t),t=Zk(r),r=ax2(f&=t)):(f=et(t),t=vm2(s),v=ce(f=f instanceof t),f=By(t),t=dL(f),f=qN2(f+=t));else if(M<36249)if(M<36174){if(M<36152)return void(f=gZ({}));f=vj2(u),t=Vf(f),t=Pv2(f>>>=t),f=Ie(_),t=PG(u),f=S6(f=t!==f)}else if(M<36175)for(t=pq([]),d=c.CFf,f=0;f<d;f++){let c=v=L6(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else f=ub1(t),t=B$2(f),r=ab1(f/=t),f=Ta1(t),t=O71(f),r=x(f/=t);else 
M<36297?M<36250?(f=vh(r),t=xq(s),null!=(r=T02(_)).variablePool?n(r,t,f):r[t]=f,t=YW2(h)):(f=To2({}),f=wx(t),t=UM(f),r=Ab2(f/=t)):(f=mr2(z),t=VQ2(f),f=rX2(_=cE2(f=f<=t)),t=e[i++],f?(i+=t,v=O91(f)):w=10);else if(M<36799)if(M<36696)if(M<36679)M<36423?(f=HS({}),f=Pd1(r),t=iU(s),_=Qa2(f^=t)):(f=te2(r),r=e[i++],--f[constantPool[r]],f=vl(_),t=Jv2(u),v=aw1(f*=t));else if(M<36680)f=Tf2(_),t=rh2(w),u=r42(f=t<f),f=tM(w),r=e[i++],f[constantPool[r]]+=1;else{for(f=l92(r),t=V1(s),_=XH(f^=t),z.for_in_xh_cbb_list=p,f=e[i++],r=Q12(_),g=[],t=0;t<f;t++)g.splice(0,0,Oh2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=nZ(s)}else if(M<36784)M<36697?(t=A42([]),f=lA2({})):(f=eI2(s),t=$g(f),t=PP1(f=t<=f),t=QU2(h));else{t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else M<37136?M<36912?M<36800?(f=p8(),t=HX(),Br(f=t!=f),_=S4(r),f=W2(~_)):(f=Q(s),t=Ss1(f),t=oa(f=Ke2(t=aR2(f=t<=f))),r=n9(f/=t)):M<36913?(f=Z3(),t=t_2(),O22(f=t!=f),f=Ab(r),t=o$(s),_=v7(f^=t)):(f=_D2(t),t=tf1(f),f=mm(f+=t),_=R6(f),t=m_1(void 0)):M<37209?M<37137?(f=T32(r),t=Sk(f),f=jI(f=f<t),_=Zi(r),f=p9(~_)):(f=Pd(t),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=Fq2(f)):(f=_s1(_),t=rl2(w),u=rV2(f=t<f),f=cv1(_),t=iZ2(u),v=Gb1(f*=t));else if(M<43548)if(M<40335)if(M<39014)if(M<38354)if(M<38e3)if(M<37458)if(M<37348)M<37243?(f=nM2(z),t=B22(f),_=o3(f=f<=t),f=Jv1(w),r=e[i++],f[constantPool[r]]+=1):(_=vU2(f),t=wK(void 0),f=Fo2(r),t=tG2(f),f=UL2(f=f in t));else 
if(M<37349){for(f=YG2(r),t=T7(s),_=eh2(f>>=t),f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,vV2()):s.splice(0,0,Xc());for(f=zw2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}else _=oK2(f),t=QS2(void 0),f=e[i++],i+=f;else M<37867?M<37459?(f=SP1(_),t=bX2(u),v=S$2(f*=t),f=K_2(t),t=F91(f),s=M22(f=t==f)):(_=mm2(f),t=oC2(void 0),f=cd1(r),t=xS2(s),_=yj(f^=t)):M<37868?(f=U3(z),t=jZ2(f),_=mJ2(f=f<=t),f=QC(t),t=mh1(f),r=Gy(f=t===f)):(f=Zs(t),t=$91(f),s=jg(f=t==f),f=e[i++],t=zt2(r),r=GV2(f),t[constantPool[f]]=r);else if(M<38302)if(M<38232)if(M<38001)f=AH(r),t=SD2(s),_=cj2(f%=t),f=o71(t),t=kw2(f),f=Hi2(f=t-f);else{for(f in p=[],t=nQ2(t))p.push(f);z.for_in_xh_cbb_list=p,_=lb(f),t=JS2(void 0)}else M<38233?(f=Hw(_),t=V_1(r),null!=(r=zi2(f)).variablePool?n(r,t,f):r[t]=f,f=vK(s),t=NJ2(_),u=gv1(f-=t)):(f=E71(r),t=e[i++],f?w=10:(i+=t,s=cM2(f)),f=mF2(t),t=hk2(f),f=Pi2(f+=t));else if(M<38338){if(M<38303)return f=Vi(t),t=pr2(f),void(r=I62(f=t===f));f=v41(r),t=Mf2(f),r=delete t[f],f=$I2(r),f=RA2(r),t=ig2(s),_=Yw(f%=t)}else f=Q31(t),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=w$(f);else if(M<38576)if(M<38480)M<38437?null!=(r=M<38355?(f=Kc(t),t=ho2(r),r=qp(f&=t),f=tA(_),t=ZZ2(r),xN2(f)):(f=Ao2(r),t=Q$(r),t=vM2(f=t<f),f=ki(r),t=_e1(s),kI2(_))).variablePool?n(r,t,f):r[t]=f:M<38438?(f=wt(t),t=wR(f),f=nN2(f+=t),f=ao2(u),t=Dd1(v),d=k42(f|=t)):(f=mV2({}),f=Qh(s),t=gw(_),u=iE2(f-=t));else if(M<38550)if(M<38481)f=az2(s),t=cv2(_),u=vA2(f-=t),f=Wv1(r),t=fs2(r),t=Wn1(f=t<f);else for(t=gt2(h),f=c.CFf,t=0;t<f;t++)w=Mx(t),z[w]=o[t];else f=v71(s),t=nr2(f),t=FJ2(f=t<=f),f=vi1(_),t=wn(w),u=Vm2(f=t<f);else if(M<38899)if(M<38734)M<38577?(f=sB(t),t=Xs2(f),r=KK(f=t===f),f=xI2(u),t=Ei(v),d=DP(f|=t)):(f=q1(r),t=el(s),_=qZ2(f%=t),f=rd2(),t=k02(),S51(f=t!=f));else if(M<38735)f=Gf2(t),t=au2(s),v=Dw(f=f instanceof 
t),f=y42(z),t=Sn1(f),_=TC(f=f<=t);else{t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=E2(),t=Dw1(),ow1(f=t!=f)}else if(M<38959)if(M<38900){t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=G11(t),t=jf2(r),r=bG2(f&=t)}else f=G0(s),t=i6(_),u=QE2(f-=t),f=D1(w),r=e[i++],f[constantPool[r]]+=1;else f=Ga1({}),f=e[i++],t=_$(constantPool[f]);else if(M<39561)if(M<39203)if(M<39079)if(M<39056)if(M<39015)t=GZ(h),f=wo2(r),t=dc(s),_=v0(f^=t);else{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,eo()):s.splice(0,0,ln1());for(f=oe2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}f=so2(t),t=yr(f),f=kj2(f=t-f)}else r=(M<39057?(f=ke(u),t=ul2(f),f=Ht2(t=Kn(f>>>=t)),t=EJ2(r),f&=t,AY):(_=Ly1(r),s=pU2(!_),f=DD2(t),t=Oq2(f),f/=t,al1))(f);else if(M<39164)if(M<39080)f=bf2(s),t=OS2(_),u=BK(f-=t),f=zN2(_),t=C61(w),u=e61(f=t<f);else{for(z.for_in_xh_cbb_list=p,f=e[i++],r=Il(_),g=[],t=0;t<f;t++)g.splice(0,0,Hr1(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=Rx(s),f=KM2(t),t=vi(f),f=B7(f=t-f)}else f=fA2(z),t=Ld2(f),_=le1(f=f<=t),f=ni1(r),t=e[i++],f?w=10:(i+=t,s=DV(f));else 
if(M<39419)M<39365?M<39204?(y=sb(f),null!=(P=Q_1(t)).variablePool?n(P,y,v):P[y]=f,f=gm2(),t=pS2(),kj(f=t!=f)):(f=Ui2(t),t=E81(f),r=wL(f=t===f),y=ko(f),null!=(P=rT(t)).variablePool?n(P,y,v):P[y]=f):M<39366?(f=OL(s),t=hG2(f),t=Ps2(f=t<=f),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=es(f)):(f=Ka1(_),t=YV2(w),u=Vd(f=t<f),f=yy2(t),t=Li2(f),r=Rk(f/=t));else if(M<39479)if(M<39420)_=np(r),f=M3(~_),f=Uw1(u),t=SS2(f),t=fQ(f>>>=t);else{for(d=c.CFf,f=0;f<d;f++){let c=v=g51(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=BJ(s),t=NS(f),t=b5(f=t<=f)}else{t=zt1(h),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else if(M<39902)M<39769?M<39732?M<39562?(f=eL(s),t=Mu1(_),u=mf2(f-=t),t=Ve2([])):(f=d01(t),t=v52(f),r=_V2(f/=t),f=Cy1(_),t=hD(u),f=v02(f=t!==f)):M<39733?(f=rO(t),t=yh1(f),r=M7(f/=t),_=yc1(r),s=xx2(!_)):(f=By1(_),t=Yj(u),f=sG2(f=t!==f),f=N31(_),t=Z42(u),f=x22(f=t!==f)):M<39834?M<39770?(f=Sv(_),t=n2(w),u=JC2(f=t<f),f=uz(r),t=Sj(f),r=delete 
t[f],f=S3(r)):(f=Yn2(t),f=o42(),t=cg(),_d(f=t!=f)):(t=q$([]),f=qw2(u),t=NQ(f),t=IO2(f>>>=t));else if(M<40134)if(M<39969)if(M<39903)throw f=Rn2(t);else f=kr(z),t=WN(f),_=u8(f=f<=t),t=lJ2(h);else M<39970?(_=C_2(r),s=oT(!_),f=bj(u),t=e[i++],f?w=10:i+=t):(f=kh(z),t=Bl(f),_=UY2(f=f<=t),f=e[i++],t=y5(f));else if(M<40241)M<40135?(f=iP(t),t=$8(f),f=Wd(f+=t),f=Xy2(t)):(f=Mq(),t=Cn(),VW2(f=t!=f),f=e[i++],t=kb2(constantPool[f]));else{f=ol(r),t=BD(s),_=NW(f>>=t),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else if(M<42136)if(M<41421)if(M<40684)if(M<40660)if(M<40403)if(M<40336)f=Ro2(r),t=A11(t),f=nR(f<<=t),f=zo(_),t=e[i++],f?(i+=t,v=E51(f)):w=10;else for(_=rl1(f),t=g82(void 0),f=c.CFf,t=0;t<f;t++)w=Nj(t),z[w]=o[t];else if(M<40404){f=k4(z),t=$01(f),f=f<=t,_=UW(f);{for(z.for_in_xh_cbb_list=p,f=e[i++],r=OP1(_),g=[],t=0;t<f;t++)g.splice(0,0,UW2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=cU2(s)}}else f=Gm2(r),t=jT(s),_=af(f^=t),f=$k(r),(t=g02(f)).push(f),f=iB(t);else M<40670?M<40661?(f=n_2(_),t=Wb2(u),f=lu(f=t!==f),f=SH(r),t=tz2(t),f=aH(f<<=t)):(f=JK2(r),t=DK2(s),_=hr1(f%=t)):M<40671?(f=jc(r),t=r_(f),f=fM2(f=f<t),f=Qg({})):(f=pg2(z),t=jD(f),_=nW2(f=f<=t),f=_u2(z),t=YK2(f),_=R62(f=f<=t));else if(M<41261)M<40894?M<40685?(f=Z8(_),t=a92(w),u=hJ2(f=t<f)):(f=tj2(r),r=e[i++],--f[constantPool[r]],f=d32(t),t=iS2(r),r=R12(f&=t)):M<40895?(f=KW(s),t=Yl2(_),u=tQ2(f-=t),f=vA(_),t=wt2(u),v=Xa1(f*=t)):(t=c31(h),f=tc2(z),t=pN2(f),_=VU2(f=f<=t));else if(M<41411){if(!(M<41262))return 
f=Vq2(t),t=x72(f),void(s=tF2(f=t==f));for(t=z12([]),f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,ch2()):s.splice(0,0,U02());for(f=qR2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}else{for(f=a7(r),t=Qx2(s),_=g9(f>>=t),f=e[i++],r=UX2(_),g=[],t=0;t<f;t++)g.splice(0,0,ek2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=gf1(s)}else if(M<41646)if(M<41505)if(M<41465)M<41422?(f=Ww(r),t=IC2(s),_=I61(f%=t),t=dx([])):(f=fx(r),t=UB(s),_=Ng2(f%=t),f=Gl1(z),t=AA2(f),_=Qq(f=f<=t));else if(M<41466){{for(z.for_in_xh_cbb_list=p,f=e[i++],r=F31(_),g=[],t=0;t<f;t++)g.splice(0,0,R02(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=qt(s)}f=V8(r),t=Xn1(s),f>>=t,_=TM(f)}else{f=FK(),t=c_2(),f=t!=f,nV(f);for(f=c.CFf,t=0;t<f;t++)w=Gc(t),z[w]=o[t]}else if(M<41632)if(M<41506)f=m9(r),t=B_2(s),f=e1(_=md2(f^=t)),t=SZ2(w),u=G82(f=t<f);else{{for(z.for_in_xh_cbb_list=p,f=e[i++],r=dV2(_),g=[],t=0;t<f;t++)g.splice(0,0,gO(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=w41(s)}f=TD(z),t=Jc1(f),f=f<=t,_=fI(f)}else _=UH(f),t=wZ2(void 0),f=sN(t);else if(M<42083)M<41837?M<41647?(f=e[i++],t=V7(constantPool[f]),f=Mt1(),t=EX2(),IK2(f=t!=f)):(f=Y_1({}),f=z42(r),t=yI2(s),_=Zx(f%=t)):M<41838?(f=IU2(t),t=Qr2(f),r=Ln2(f/=t),f=e[i++],t=qt2(r),r=Cs1(f),t[constantPool[f]]=r):(f=oY2(t),t=ys1(f),r=y$2(f=t===f),f=Pu1(s),t=um2(_),u=gv2(f-=t));else if(M<42108)if(M<42084)t=V72(h),f=bC(t),t=FO2(r),r=xu(f&=t);else{{for(z.for_in_xh_cbb_list=p,f=e[i++],r=RB(_),g=[],t=0;t<f;t++)g.splice(0,0,En1(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new 
r(...g),offnew=0,f=jw2(s)}t=U42([])}else{f=mN2({});{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,nb2()):s.splice(0,0,K_1());for(f=rS2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}}else if(M<43176)if(M<42981)if(M<42624)M<42269?M<42137?(f=gq2(_),t=MU2(w),u=f12(f=t<f)):(_=zs(f),t=CG2(void 0),f=Br1(s),t=Wq2(f),t=BZ(f=t<=f)):M<42270?(_=je(r),f=bW2(~_),f=Bl2(),t=wM2(),Lo(f=t!=f)):(_=Qk2(r),s=mn1(!_),y=ph(f),null!=(P=P$2(t)).variablePool?n(P,y,v):P[y]=f);else if(M<42859)M<42625?(f=gI(t),r=e[i++],f?i+=r:w=9,f=_a2(r),t=A81(s),_=i_1(f>>=t)):(f=zH2(t),t=V32(r),r=mx(f&=t),t=Y2(h));else{for(f in p=[],t=OJ(t))p.push(f);z.for_in_xh_cbb_list=p,t=Se(h)}else if(M<43094)if(M<43021)if(M<42982){for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,C$()):s.splice(0,0,ER2());for(f=px(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}f=VR2(s),t=pX2(_),u=NZ(f-=t)}else f=rd(r),t=rv(s),_=eg(f%=t),f=He2(r),(t=wd2(f)).push(f),f=Ob(t);else M<43022?(f=e[i++],t=kq2(r),r=n52(f),t[constantPool[f]]=r,f=e_(t),t=k1(r),r=J0(f&=t)):(f=e[i++],t=Cz(constantPool[f]),f=pe1(_),t=JP(u),v=B9(f*=t));else M<43150?s=(M<43095?(f=Kf(r),t=f11(r),f=(t=Dp(f=va(t=PI2(f=t<f))))==f,lj2):(f=N7(r),t=rw2(s),_=$Q(f^=t),f=DJ2(t),t=EA(f),f=t==f,U41))(f):(f=UD2({}),f=k21(r),(t=_b2(f)).push(f),f=Nc1(t));else if(M<43254)if(M<43212)if(M<43202)if(M<43177)f=Y1(r),t=K42(f),f=E01(f=f<t),f=PL2(t),t=dQ2(f),f=ns(f+=t);else{f=S52(t),t=jy2(f),r=D(f=t===f),f=CV(t),t=Ij2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=Yz(f)}else M<43203?(f=Fv(),t=ux(),u81(f=t!=f),f=pP2(t),t=DZ2(r),r=FH(f&=t)):(_=y82(t),t=Tn(-_),f=JZ2(r),t=G31(s),_=ts2(f>>=t));else M<43251?M<43213?(f=tI2(r),t=gw1(t),f=N92(f<<=t),f=W12(t),t=bU2(f),f=Sh(f+=t)):(f=ni2(u),t=w61(f),t=Qt1(f>>>=t)):(f=at2(t),t=Mv2(f),s=dN2(f=t==f),f=O6(t),t=pv2(r),r=lX(f&=t));else if(M<43390)if(M<43314)if(M<43255)for(d=c.CFf,f=0;f<d;f++){let c=v=Ud2(f);l[c]=function(){var b=new 
cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else f=d7(t),t=j$2(r),r=uo2(f&=t);else M<43315?(f=EK2(t),t=hU2(f),f=ll(f+=t)):(f=OV2(_),t=e[i++],f?(i+=t,v=NQ2(f)):w=10,f=Ss(t),t=_U2(f),s=SE2(f=t==f));else M<43446?M<43391?(t=YH(h),f=nr1(t),t=KV2(f),r=t6(f/=t)):(f=tJ2(_),t=QH2(u),v=mf1(f*=t),t=C92(h)):(f=vX(t),t=Io(r),f=x42(r=l_2(f&=t)),t=DF(r),t=Ru(f=t<f));else if(M<46911)if(M<45196)if(M<44489)M<44223?M<43932?M<43823?M<43549?(f=a42(_),t=Ch2(w),u=Dd(f=t<f),f=Gn1(),t=YM(),c6(f=t!=f)):(f=x9(r),t=Z2(s),_=c02(f^=t),f=wo1(t),t=Hp2(s),v=dY2(f=f instanceof t)):M<43824?(f=Eh1(t),f=is2(t),t=xl2(f),f=rM(f=t-f)):(f=aH2(t),t=pX(f),r=yV2(f/=t),f=id1(s),t=jL2(f),t=Zl1(f=t<=f)):M<44095?M<43933?(f=av(s),t=I92(f),t=Ll2(f=t<=f),y=d12(f),null!=(P=ah2(t)).variablePool?n(P,y,v):P[y]=f):(f=e[i++],i+=f,f=tV(t),t=K8(f),r=G(f/=t)):t=M<44096?(function(){for(z.for_in_xh_cbb_list=p,f=e[i++],r=nu1(_),g=[],t=0;t<f;t++)g.splice(0,0,PY2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new 
r(...g),offnew=0,f=Zu1(s)}(),f=L71(s),t=c01(f),Sr1(f=t<=f)):(f=d1(t),t=Ge1(f),r=Ci(f=t===f),f=e[i++],_K2(constantPool[f])):M<44295?M<44288?M<44224?(f=zG(u),t=n4(f),f=ix(t=tj(f>>>=t)),t=Q7(r),r=SG(f&=t)):(f=Eh2(s),t=xY2(_),u=s71(f-=t),f=uf(t),t=I0(f),f=Wt2(f=t-f)):M<44289?(f=W52(r),t=r7(r),t=QG(f=t<f),f=zg(r),t=AX(s),null!=(r=m72(_)).variablePool?n(r,t,f):r[t]=f):(f=Wz2(t),t=EQ2(f),f=_y1(f+=t),f=qy2(),t=O0(),sg(f=t!=f)):M<44380?f=M<44296?xi1(t):(f=rA2(r),t=no1(t),f=u0(f<<=t),f=Xy(t),t=uM2(r),null!=(r=SC2(s)).variablePool?n(r,t,f):r[t]=f,as2(r)):(f=v91(z),t=al(f),_=Zm(f=f<=t),_=Rb2(r),s=R$(!_));else if(M<44982)M<44595?M<44575?t=M<44490?(f=Rb(s),t=tt(_),u=UH2(f-=t),eT(h)):(f=xs2(s),t=IT(f),t=be(f=t<=f),f=W61(s),t=aw2(f),h61(f=t<=f)):M<44576?(f=_b1(t),t=mu1(f),r=Rz2(f=t===f),f=uF2(_),t=v$(w),u=Z_(f=t<f)):(f=by(l=z),f=B31(r),t=Ck2(s),_=hd(f^=t)):M<44641?M<44596?(f=Ka(s),t=X9(_),u=WE(f-=t),f=er(s),t=Zb2(f),t=K7(f=t<=f)):(f=r32(t),t=k31(f),r=EH2(f=t===f),f=vS(t),t=aY(r),null!=(r=$Y2(s)).variablePool?n(r,t,f):r[t]=f,f=_Q2(r)):(t=VL2(h),f=BE(t),t=Fh2(f),s=kn2(f=t==f));else if(M<45128)if(M<45035)if(M<44983){for(z.for_in_xh_cbb_list=p,f=e[i++],r=Yv(_),g=[],t=0;t<f;t++)g.splice(0,0,fV(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=bP(s),f=nf2(r),t=qX(f),f=Kd(f=f<t)}else f=e[i++],i+=f,f=ZL(t),t=s9(f),s=Yy2(f=t==f);else if(M<45036)f=MZ(t),t=RS(f),f=yE(f+=t),f=BR2(t),t=oP(f),r=w4(f=t===f);else{for(f=MM2(r),t=k32(t),f=$31(f<<=t),f=e[i++],r=wS(_),g=[],t=0;t<f;t++)g.splice(0,0,SR2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=Rr2(s)}else M<45162?M<45129?(f=YY(z),t=$n2(f),_=TG(f=f<=t),f=Oz2(t),t=kd(s),v=j52(f=f instanceof t)):(_=H0(r),f=BI(~_),f=By2(r),t=b_1(t),f=NN2(f<<=t)):(f=W11(t),r=e[i++],f?i+=r:w=9);else if(M<45915)if(M<45573)if(M<45320)if(M<45295){if(!(M<45197))return 
f=w_2(t),t=$11(f),void(f=JZ(f+=t));f=Pv(s),t=w62(f),f=hA2(t=EI2(f=t<=f))}else M<45296?(f=cw(t),f=Jy(r),t=e[i++],f?w=10:(i+=t,s=vt1(f))):(f=JM(t),t=aY2(f),r=y52(f/=t),f=mK(u),t=Tt2(v),d=_G2(f|=t));else if(M<45415)if(M<45321)t=DL([]),f=gH(r),t=gF2(f),r=delete t[f],f=UI(r);else{f=K52(t),t=aM2(f),f=t===f,r=dy1(f);{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,WO()):s.splice(0,0,OS());for(f=jJ2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}}else f=eG2(s),t=C1(f),t=UP2(f=t<=f);else if(M<45770)if(M<45731){if(!(M<45574))return _=Ju(r),void(s=pY2(!_));_=wk(r),s=Fc(!_),f=X22(r),t=E42(s),_=bQ(f>>=t)}else M<45732?(f=pu2(t),t=OU2(f),r=O11(f/=t),f=bl(t),t=dh2(r),r=T$2(f&=t)):(f=i1(r),r=e[i++],--f[constantPool[r]],f=ZD2(u),t=cX(v),d=Lh1(f|=t));else M<45850?t=M<45771?(f=Wa(s),t=ns2(f),t=fl1(f=t<=f),f=Pj2(r),t=ny(r),Vo(f=t<f)):(f=qf2(r),t=Aq(t),f=Hn1(f<<=t),aS2([])):(_=Id1(f),t=y22(void 0),f=ll1(r),r=e[i++],--f[constantPool[r]]);else if(M<46335)if(M<46212)M<45958?M<45916?(f=Bf(t),t=aM(f),f=RF2(f=t-f),f=Nu(_),t=zW(u),v=Ey1(f*=t)):(f=fj2(r),t=oE(f),f=l0(f=f<t),f=vg({})):M<45959?(_=pa2(f),t=S0(void 0),_=qB(f),t=H52(void 0)):(f=oZ2(t),t=nh1(f),r=DG(f=t===f),f=tp2(t),t=xT(f),r=SL(f=t===f));else if(M<46283)M<46213?(f=e[i++],t=$j2(constantPool[f]),f=kS(_),t=vB2(w),u=Tn2(f=t<f)):(f=Od(t),t=NY2(f),r=XG2(f=t===f),f=X$2(s),t=$6(f),t=GP2(f=t<=f));else{f=A$2(u),t=At1(f),f>>>=t,t=i91(f);{for(z.for_in_xh_cbb_list=p,f=e[i++],r=F72(_),g=[],t=0;t<f;t++)g.splice(0,0,xm2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=dt1(s)}}else if(M<46702)if(M<46517)M<46336?(t=yK2([]),f=q52(r),r=e[i++],--f[constantPool[r]]):(f=ex2(r),t=UJ2(r),t=sx2(f=t<f),f=Zp2(s),t=xC(_),u=oR2(f-=t));else if(M<46518)f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=AM2(f),f=y92({});else{f=WS2(t),t=dd2(f),f=t-f,f=zr2(f);throw f=gb2(t)}else M<46792?M<46703?(f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=Jl2(f),_=dU(f),t=O51(void 
0)):(f=xz2(r),(t=RD(f)).push(f),f=E0(t),f=Hh1(r),t=s51(s),_=Le(f>>=t)):(f=uC2(r),(t=mM(f)).push(f),f=f22(t),f=wf2(r),t=_z(f),f=om(f=f<t));else if(M<48503)if(M<47670)if(M<47300)M<47136?M<47002?M<46912?(f=Ks(_),t=k$2(u),f=My1(f=t!==f),f=Yy(u),t=e[i++],f?w=10:i+=t):(f=cn(_),t=SY(w),u=n72(f=t<f),f=en1(_),t=zo2(w),u=w52(f=t<f)):M<47003?(f=ac(_),t=e[i++],f?(i+=t,v=K01(f)):w=10,f=Kw2(r),t=fX2(s),_=cG(f^=t)):(f=$w1(t),t=vt2(f),r=S01(f/=t),f=zw1(t),t=Ps(f),f=QY2(f+=t)):M<47229?M<47137?(f=PE(z),t=ZO(f),_=Sw1(f=f<=t),f=Wu1(s),t=N01(f),t=WC2(f=t<=f)):(f=fM(t),t=jr1(s),v=VZ(f=f instanceof t),f=lt1(_),t=kD2(u),v=XL(f*=t)):M<47230?(f=bw2(_),t=e[i++],f?(i+=t,v=YF(f)):w=10,f=bT(_),t=Ig(w),u=nt2(f=t<f)):(f=tu1(t),t=VP2(f),f=Wh2(f+=t),f=GH2(r),t=Ua1(s),_=iO(f^=t));else if(M<47403)if(M<47343)M<47301?(f=_f({}),f=e[i++],t=Cy(constantPool[f])):(f=qW2(_),t=kA2(u),f=Xw(f=t!==f),f=e[i++],i+=f);else{if(M<47344)throw f=oz(t),t=_h(f),f=pB2(f+=t),f=vB(t);t=uo([]),f=xa2(r),t=Ya(s),null!=(r=Wi2(_)).variablePool?n(r,t,f):r[t]=f}else if(M<47470)if(M<47404)_=fc(t),t=Ar1(-_),f=tr1(t),t=Hx2(r),r=G$(f&=t);else{for(f=e[i++],r=J02(_),g=[],t=0;t<f;t++)g.splice(0,0,gE(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=mH2(s),t=mX2([])}else f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=V21(f),f=bh2(t),t=b41(f),r=do1(f=t===f);else if(M<48200)M<47762?M<47742?M<47671?(f=nl(r),(t=Be2(f)).push(f),f=d22(t),f=d72(u),t=N$(v),d=GX2(f|=t)):(f=Vv(t),r=e[i++],f?i+=r:w=9,f=bj2(s),t=jf1(_),u=xl1(f-=t)):M<47743?(f=Al(s),t=sP1(f),t=wa1(f=t<=f),f=e[i++],t=SY2(f)):(f=F$2(s),t=r61(f),t=aF(f=t<=f),f=aR(r),t=LU2(s),null!=(r=z9(_)).variablePool?n(r,t,f):r[t]=f):M<48189?(M<47763?(f=nx(t),t=a52(f),s=R61(f=t==f),function(){throw 
f=vv1(t)}):(f=qj2(t),t=O82(f),f=Uz(f=t-f),function(){for(f=c.CFf,t=0;t<f;t++)w=on2(t),z[w]=o[t]}))():(f=Dq2(r),t=Vw2(s),_=LM(f^=t),f=Tj2(r),t=wc1(s),_=JV2(f>>=t));else if(M<48379)M<48255?M<48201?(t=Mb1(h),_=z8(f),t=MH2(void 0)):(f=Ux2(r),t=L_(s),_=HR(f>>=t)):M<48256?(f=WK(u),t=hd1(v),d=ZI2(f|=t),f=Ml(t),t=MV2(s),v=su1(f=f instanceof t)):(t=PX(h),f=cS(r),t=Ji1(t),f=e01(f<<=t));else{if(!(M<48422))return f=Iv(r),t=b31(s),void(_=uL(f^=t));t=M<48380?(f=rB2(),t=n62(),QE(f=t!=f),_=gL2(f),F01(void 0)):(f=wp2(t),t=qr2(r),f=$22(r=$w(f&=t)),t=zi1(r),wN2(f=t<f))}else if(M<49541)if(M<48959)if(M<48755)if(M<48530)t=M<48504?(_=K12(f),t=zf2(void 0),J_2([])):(function(){throw f=Et1(t)}(),f=T1(r),t=v32(r),qS2(f=t<f));else if(M<48531){f=Un2(z),t=Gk(f),f=f<=t,_=wU(f);{f=XS(t),t=Mz(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=Sn2(f)}}else f=D22(t),t=Kx(f),f=Ib2(r=CU2(f=t===f)),t=ob2(f),r=delete t[f],f=qk(r);else M<48906?M<48756?(f=u62(r),t=n71(s),null!=(r=qz2(_)).variablePool?n(r,t,f):r[t]=f,f=vH2(_),t=xw(w),u=N1(f=t<f)):(_=gA2(r),s=cL2(!_),f=i9(t)):(f=Es1(z),t=$C(f),_=St1(f=f<=t),f=D01(t),t=qa(f),r=Dy1(f/=t));else if(M<49193)if(M<49098)if(M<48960)f=J01(t),t=Gr2(f),f=$72(f=t-f),f=HC2(t),t=hO2(f),r=B62(f=t===f);else{throw f=jd2(t);f=f3(r),t=f01(r),f=t<f,t=qb2(f)}else M<49099?(f=JO2(r),t=u41(f),r=delete t[f],f=Ze(r),f=nd1(r),t=af1(r),t=mo1(f=t<f)):(f=r11(t),t=ok(r),r=yz2(f&=t),f=Cc(_),t=GN(u),f=lh(f=t!==f));else M<49329?M<49194?(f=jY(r),t=Au2(f),f=es1(f=f<t),f=ug({})):(f=e[i++],t=yi(r),r=ZN2(f),t[constantPool[f]]=r,f=tq2(u),t=d3(f),t=OE(f>>>=t)):(f=Ww1(s),t=Lk(f),t=bI(f=t<=f),f=Fi1(_),t=H92(u),f=Bn1(f=t!==f));else if(M<49921)if(M<49852)if(M<49680)if(M<49542)f=np2(t),t=h3(s),v=xB(f=f instanceof t),t=ju2([]);else{f=rQ2(r),t=ww2(r),f=t<f,t=hb2(f);throw f=UF2(t)}else M<49681?(_=EI(r),f=d62(typeof _),f=VH2(r),t=_32(s),_=D32(f%=t)):(_=VM(f),t=a9(void 0),f=n51(_),t=oX(u),f=wn1(f=t!==f));else 
if(M<49906)M<49853?(f=Vk(r),t=oQ2(s),_=C11(f>>=t),f=e[i++],i+=f):(f=_j2(t),t=T11(f),f=Xd2(f=t-f),f=XA2(r),t=WA2(s),_=U51(f%=t));else{{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,qs()):s.splice(0,0,Op());for(f=qw(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}f=T52(t),t=UK2(f),f=t==f,s=mE(f)}else if(M<50146)if(M<50073)M<49922?(f=e[i++],t=Vd1(f),f=cL(t),t=wP2(f),f=bY(f+=t)):(f=AC2(_),t=R21(r),null!=(r=yo(f)).variablePool?n(r,t,f):r[t]=f,f=Zh2(t),t=FC2(r),r=Ec2(f&=t));else if(M<50074){{f=P71(t),t=Er1(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=Wo1(f)}_=MO2(f),t=cP2(void 0)}else f=Nv1(r),t=We1(t),f=kZ(f<<=t),f=Se1(t),t=Me(f),r=uR(f=t===f);else if(M<50435)if(M<50147){f=Xt1(t),t=j11(f),f/=t,r=wi1(f);throw f=oa2(t)}else f=Nc(t),t=Fh(r),f=Yi2(r=tq(f&=t)),t=ss(f),r=delete t[f],f=OT(r);else _=Z(t),t=Zs2(-_),f=To1(t),t=jN2(r),r=rT2(f&=t);else if(M<74758)if(M<63355)if(M<56838)if(M<53995)if(M<52058)if(M<51289)if(M<50993)if(M<50688)M<50630?M<50473?(_=dc2(r),s=p11(!_),f=XG(t),r=e[i++],f?i+=r:w=9):(f=Rq2(r),t=W32(s),_=RO2(f%=t),f=Um(r),t=Cd(f),f=dL2(f=f<t)):(M<50631?(t=ad2(h),f=nR2(),t=Rl1(),f=t!=f,nZ2):(function(){throw f=pV(t)}(),f=qT(),t=sc2(),f=t!=f,f91))(f);else if(M<50759)f=M<50689?(f=Vl2(u),t=kR2(v),d=ke1(f|=t),f=Yb1(_),t=i0(u),a$(f=t!==f)):(f=iW2(r),t=e[i++],f?w=10:(i+=t,s=m1(f)),us({}));else if(M<50760)f=dv(),t=O62(),ny2(f=t!=f),_=u4(t),t=ec1(-_);else{for(f=c.CFf,t=0;t<f;t++)w=G32(t),z[w]=o[t];_=I02(r),s=gd2(!_)}else M<51161?M<51104?M<50994?(t=ri2([]),_=TV(r),f=Tk2(~_)):(f=Zu(u),t=kt1(v),d=JK(f|=t),_=Ts1(f),t=K3(void 0)):f=M<51105?(f=Fl(u),t=B52(f),t=pL2(f>>>=t),f=o_1(r),t=qC2(f),QO(r=delete t[f])):(f=kW2({}),U4({})):M<51259?f=M<51162?(f=oB(t),t=Wn2(f),s=Wo(f=t==f),f=W92(_),t=ek(u),ui1(f=t!==f)):(f=vr1(r),t=zc(t),f=of1(f<<=t),Oo1(t)):(f=UX(r),t=Px2(s),_=h22(f>>=t),f=T71(r),t=Ds2(s),_=y61(f%=t));else if(M<51683)if(M<51516)if(M<51436)M<51290?(f=JL2({}),f=e[i++],t=e[i++],f=new 
RegExp(constantPool[f],constantPool[t]),s=HM(f)):(f=xt2(z),t=gS(f),_=Nd1(f=f<=t),f=hW(s),t=Of1(f),t=zA2(f=t<=f));else{if(!(M<51437))return _=gR(f),void(t=lO2(void 0));f=St(t),t=VY(s),v=Mg2(f=f instanceof t),f=aW2(r),t=Nz2(f),f=Ji(f=f<t)}else M<51599?M<51517?(f=PW2(_),t=a51(u),f=th(f=t!==f),f=e[i++],t=X2(r),r=$o1(f),t[constantPool[f]]=r):(f=Oc1(s),t=pZ2(_),u=ru2(f-=t)):(t=z7([]),f=x61(t),t=Wv(f),f=z21(f+=t));else if(M<51984)M<51849?M<51684?(f=$U2(_),t=w8(u),v=gZ2(f*=t),f=da1(r),t=iv(r),t=$41(f=t<f)):(f=jR2(s),t=$J(f),f=ir(t=SO2(f=t<=f)),t=gz2(r),r=Ef1(f&=t)):M<51850?(f=jX(t),t=Z4(f),s=QN2(f=t==f),f=$f(t),r=e[i++],f?i+=r:w=9):(f=Xd(u),t=gc1(f),t=na2(f>>>=t));else if(M<52050)M<51985?(f=GD2(_),t=Y9(u),f=C6(f=t!==f),f=zb2(z),t=eP(f),_=cM(f=f<=t)):(f=u_1(r),t=Hq(s),_=wX2(f>>=t),f=Hj2(t),t=zE(f),r=bs1(f=t===f));else{f=pS({});{f=Zx2(t),t=Ot1(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=JY(f)}}else if(M<53012)if(M<52396)if(M<52290)if(M<52173)if(M<52059){for(_=To(f),t=g$2(void 0),f=e[i++],r=Ws2(_),g=[],t=0;t<f;t++)g.splice(0,0,m_2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=hd2(s)}else _=S_(f),t=NA(void 0),f=W21(_),t=Uc(w),u=t3(f=t<f);else M<52174?(f=JH2(r),t=QX2(s),_=rE2(f%=t),f=Nu2(r),t=pC2(r),t=$O2(f=t<f)):(f=R41(t),t=ZJ(f),s=nd2(f=t==f),f=e[i++],i+=f);else M<52334?M<52291?(y=bB(f),null!=(P=ds2(t)).variablePool?n(P,y,v):P[y]=f,f=Tl1(s),t=yT2(f),t=Qe1(f=t<=f)):(f=SJ(t),f=qO2(w),r=e[i++],f[constantPool[r]]+=1):M<52335?(f=ec(r),t=y_2(s),_=lV2(f>>=t),f=s5(z),t=k_(f),_=R5(f=f<=t)):(f=ew2(_),t=e[i++],f?(i+=t,v=B4(f)):w=10,f=ei1(t),t=wC(f),r=B72(f=t===f));else if(M<52861)if(M<52821)if(M<52397){{f=o11(t),t=Xx2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=Gh2(f)}f=eb2(t),t=zd1(r),f&=t,r=Se2(f)}else f=GN2(_),t=hW2(r),null!=(r=ft(f)).variablePool?n(r,t,f):r[t]=f,f=Si2(r),t=T8(s),_=oP1(f^=t);else 
M<52822?(f=Jj(_),t=aC(r),null!=(r=f_1(f)).variablePool?n(r,t,f):r[t]=f,_=Ea1(r),s=Mc(!_)):(f=K82(_),t=pT(w),u=p72(f=t<f),f=VN(s),t=M_1(f),t=vr2(f=t<=f));else if(M<52980)M<52862?(f=X7(_),t=Y52(u),f=Og2(f=t!==f),t=UP1([])):(f=cy2(z),t=qp2(f),_=N9(f=f<=t),f=e[i++],i+=f);else{{f=l42(t),t=AB2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=G42(f)}f=hU(t),t=aa(f),f=t==f,s=mo(f)}else if(M<53448)if(M<53151)if(M<53031)M<53013?(f=zR(_),t=cq(w),u=XT(f=t<f),f=Uq2(_),t=gu1(r),null!=(r=T2(f)).variablePool?n(r,t,f):r[t]=f):(f=h62(u),t=aO(v),d=M0(f|=t),f=Oy2(_),t=YQ(u),v=GV(f*=t));else{if(M<53032)return _=F12(r),void(s=zQ2(!_));_=Xi1(r),f=T01(typeof _),f=iJ(t),t=a11(r),r=Fc1(f&=t)}else if(M<53320)if(M<53152)_=qh(f),t=ZX2(void 0),_=Zt1(r),f=Xg2(~_);else{{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,eb1()):s.splice(0,0,Dh2());for(f=la(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}f=zR2(t),t=I2(f),f=t===f,r=xO(f)}else f=i01(t),r=e[i++],f?i+=r:w=9,f=Pp2(r),t=py2(r),t=Vz2(f=t<f);else if(M<53754)M<53596?M<53449?(f=oq2(r),t=Ww2(s),_=lR2(f>>=t)):(f=W8(t),t=w2(f),f=cT(f+=t),f=Ds1(u),t=CP1(v),d=g71(f|=t)):f=M<53597?(t=z$(h),NM(l=z)):(_=$N(r),f=h92(typeof _),da({}));else if(M<53955)r=(M<53755?(f=TI2(t),t=RK(f),f=FS2(f+=t),f=Os(t),t=H82(r),f&=t,E9):(f=a61(r),t=Vj(r),t=H71(f=L62(t=YE(f=t<f))),f/=t,$i1))(f);else{f=uQ2(t),t=PV2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=p5(f)}else if(M<55424)if(M<54896)if(M<54335)if(M<54184)M<54054?M<53996?(f=OI2(z),t=L0(f),_=ow2(f=f<=t),f=oq(r),t=X72(r),t=Wk(f=t<f)):(f=MZ2(l=z),f=vg2(t),t=Zh1(f),f=nP(f+=t)):M<54055?(f=Z7(),t=Cv1(),bk2(f=t!=f),t=Xo(h)):(f=_N2(r),t=H81(s),_=Ha1(f%=t),f=Xs1(t),t=Z5(f),s=Ma(f=t==f));else if(M<54253)if(M<54185)f=A41(t),t=a3(s),v=Xm2(f=f instanceof 
t),f=q72(t),t=b61(f),r=nV2(f=t===f);else{for(f=M81(r),t=LA2(s),_=Jx(f>>=t),f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,Wk2()):s.splice(0,0,WU());for(f=oo1(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}else M<54254?(f=MD(r),t=b52(s),_=CK2(f^=t),f=Rc(r),t=NB(f),r=delete t[f],f=U5(r)):(f=Qc1(_),t=Pa(u),f=jW(f=t!==f),f=e[i++],i+=f);else M<54677?M<54473?f=M<54336?(f=Y81(t),r=e[i++],f?i+=r:w=9,f=S8(_),t=YH2(u),Oe1(f=t!==f)):(f=ql2(t),t=q31(f),r=$h1(f=t===f),$H(~(_=j5(r)))):M<54474?(f=gb1(r),t=$C2(s),_=pd(f%=t),f=M61(t)):(f=i81(s),t=sL2(_),u=Ne2(f-=t),f=Yx2(r),r=e[i++],--f[constantPool[r]]):M<54884?M<54678?(f=m91(r),t=e[i++],f?w=10:(i+=t,s=m7(f))):(f=TA2(r),t=rm2(t),f=r$(f<<=t),f=eH2(_),t=UI2(u),v=m62(f*=t)):(f=C91(s),t=AN(_),u=AS(f-=t),f=fa(r),(t=t1(f)).push(f),f=jC(t));else if(M<55201)if(M<55038)if(M<54997)M<54897?(_=CU(f),t=Ae2(void 0),f=kh2(r),t=jP(s),_=nu(f^=t)):(f=oL(t),t=Te1(f),f=PA(f=t-f),f=Nm2(t),t=KG(f),f=_n2(f+=t));else if(M<54998)for(f=NA2(r),t=Ju2(t),f=S41(f<<=t),f=c.CFf,t=0;t<f;t++)w=Gh1(t),z[w]=o[t];else f=BA2(r),t=Wj2(s),null!=(r=$m(_)).variablePool?n(r,t,f):r[t]=f,f=dh(z),t=q22(f),_=Oe(f=f<=t);else if(M<55173)M<55039?(f=mo2(r),t=us2(f),f=xh1(f=f<t),f=wE2(),t=Eq2(),zk(f=t!=f)):(t=Qi1([]),f=e[i++],i+=f);else for(_=He1(r),s=q02(!_),f=c.CFf,t=0;t<f;t++)w=pt2(t),z[w]=o[t];else if(M<55276)if(M<55236)if(M<55202){t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return 
u;w=10,i+=s}f=aI2(t),t=Te(f),f=Re1(f=t-f)}else{for(f=QW(t),t=VL(f),r=ZF(f=t===f),f=e[i++],r=cD2(_),g=[],t=0;t<f;t++)g.splice(0,0,CO2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=Ls2(s)}else M<55237?(f=Vs1(t),t=eO2(f),f=sh2(r=Qu(f/=t)),t=NU2(f),f=Ar2(f=f<t)):(f=Wf1(r),t=hX2(s),null!=(r=sk(_)).variablePool?n(r,t,f):r[t]=f,f=Yo1(z),t=xU2(f),_=NU(f=f<=t));else M<55395?M<55277?(f=KT(t),t=Ac1(r),r=q61(f&=t),f=IS2(t)):(f=I01(_),t=ou2(u),v=Ya2(f*=t),f=QL(r),r=e[i++],--f[constantPool[r]]):(f=AD2(r),t=RK2(s),null!=(r=dr2(_)).variablePool?n(r,t,f):r[t]=f,f=py(u),t=Xm(f),t=vv2(f>>>=t));else if(M<56089)if(M<55708)if(M<55567)if(M<55461)if(M<55425)f=Ys1(r),t=DL2(s),_=rg(f>>=t),y=ju(f),null!=(P=Uv2(t)).variablePool?n(P,y,v):P[y]=f;else for(f=Kl1(t),t=tL2(f),s=h9(f=t==f),d=c.CFf,f=0;f<d;f++){let c=v=WN2(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else M<55462?(f=qY2(s),t=JP2(_),f=jF2(u=$p2(f-=t)),t=Yo2(v),d=aS(f|=t)):(t=Gs(h),f=e[i++],i+=f);else M<55599?M<55568?(f=rN(z),t=h5(f),_=sJ2(f=f<=t),t=Zc1(h)):(f=w22(r),t=YN(r),t=Qv2(f=t<f),f=pc1(u),t=e[i++],f?w=10:i+=t):(f=pn2(t),t=nn(f),f=Yt2(f=t-f),f=m92(u),t=l_(f),t=fx2(f>>>=t));else M<56014?M<55810?M<55709?(f=Dj(t),t=HB2(f),r=Gc2(f=t===f),_=g22(f),t=Fy2(void 
0)):(_=uS2(r),s=Po2(!_),f=bH(_),t=_9(u),v=iw2(f*=t)):M<55811?(f=iu1(r),t=m71(f),f=i92(f=f<t),f=P62(u),t=e[i++],f?w=10:i+=t):(f=wv2(_),t=gM2(w),u=lr2(f=t<f),f=u6(r),t=Ow2(s),_=WU2(f^=t)):M<56027?M<56015?(f=y51(r),t=cy1(r),t=xD2(f=t<f),f=Hd1(_),t=ji1(w),u=M01(f=t<f)):(f=mj(_),t=Qs2(r),null!=(r=qf(f)).variablePool?n(r,t,f):r[t]=f,f=Ja1(r),t=oZ(t),f=MF(f<<=t)):(f=uI(t),t=Bv1(f),s=xl(f=t==f),f=AA({}));else if(M<56565)if(M<56369)if(M<56196)M<56090?(f=IL2(_),t=Jh2(u),v=_c1(f*=t),t=Nk2([])):(f=DU(u),t=Mk2(f),t=EX(f>>>=t),f=Xq2(w),r=e[i++],f[constantPool[r]]+=1);else if(M<56197){t=E82(h);{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,nT()):s.splice(0,0,Jo());for(f=fS(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}}else f=ga(_),t=po1(u),f=z11(f=t!==f);else M<56456?M<56370?(f=O01(t),t=VU(f),r=PO(f/=t),_=bv(r),s=X(!_)):(f=Q9(t),t=Vt1(f),f=Bm2(f=t-f),f=ai2(r),t=xM2(f),f=Dt2(f=f in t)):t=jz(h);else if(M<56689)M<56632?M<56566?(f=HI(t),t=us1(f),r=vm(f=t===f),f=e[i++],t=zK(r),r=i32(f),t[constantPool[f]]=r):(f=uv1(r),t=fz2(s),f=V71(_=V(f^=t)),t=qu1(u),f=Qh1(f=t!==f)):M<56633?(f=om2(_),t=Q02(w),u=zl(f=t<f),_=i51(f),t=VV(void 0)):(f=dZ(),t=ZE2(),r31(f=t!=f),f=wj2(r),t=on1(s),_=Ch(f>>=t));else if(M<56822)if(M<56690)_=aU(t),t=Jt1(-_),f=Qn(r),t=y2(s),_=Ah2(f>>=t);else{{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,$a1()):s.splice(0,0,ox2());for(f=mp2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}f=oA(t)}else f=Mj(r),t=ta1(f),f=Q42(f=f<t),f=yZ2(t),t=QQ(f),r=hx2(f/=t);else if(M<60038)if(M<58735)if(M<57808)if(M<57317)M<57179?M<57050?M<56839?(f=Ey(r),t=b2(r),t=P3(f=lF2(t=kE(f=t<f))),r=n$(f/=t)):(f=Wl2(_),t=vl1(w),u=Tt1(f=t<f),f=wm(_),t=xR2(u),f=SQ(f=t!==f)):M<57051?(t=FG2([]),f=Si(t),t=Jh(r),r=vZ2(f&=t)):(f=HC(r),(t=Hf(f)).push(f),f=IH2(t),f=Q82(t)):M<57262?t=M<57180?(f=er2(t),t=yv(r),r=hM2(f&=t),_=lo1(f),C$2(void 
0)):(t=P12([]),f=X0(u),t=mW2(f),Wq(f>>>=t)):M<57263?(f=sv1(r),t=KP(s),_=xg(f^=t),f=X_2(s),t=Zo1(_),u=X32(f-=t)):(f=e8(r),t=Xe(f),f=dG(f=f<t),f=E8(_),t=fb(u),v=Xv2(f*=t));else if(M<57724)if(M<57481)if(M<57318)f=tJ(_),t=h_1(u),f=_I2(f=t!==f),f=ng2(r),r=e[i++],--f[constantPool[r]];else{f=NZ2(r),t=i11(s),_=oi(f%=t),f=Nh(t),t=BV(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=Yt(f)}else t=M<57482?(f=nE(t),t=Rf2(r),null!=(r=AM(s)).variablePool?n(r,t,f):r[t]=f,f=at1(r),f=vz2(r),t=gg(r),wi(f=t<f)):(function(){for(f in p=[],t=wo(t))p.push(f);z.for_in_xh_cbb_list=p}(),kX(h));else M<57781?M<57725?(_=rh1(r),s=OA(!_),f=$O(r),t=Rr(r),t=bx2(f=t<f)):(f=Ag2(r),t=Nk(s),_=jg2(f%=t),f=l12(t),t=os(f),f=ye2(f+=t)):(f=t02(r),t=MR2(r),t=bE(f=t<f),f=r0(r),t=mv2(f),f=mc(f=f in t));else if(M<58255)if(M<58139)M<57979?M<57809?(f=J52({}),f=qx2(r),t=$K2(s),null!=(r=e62(_)).variablePool?n(r,t,f):r[t]=f):(f=lD(r),t=_51(t),f=MI(f<<=t),t=Ni1(h)):M<57980?(f=e[i++],t=L42(constantPool[f]),f=S42(t),t=Nh1(r),r=bf1(f&=t)):(f=A7(t),t=jb(f),f=XF2(f=t-f),_=oG2(f),t=c3(void 0));else if(M<58249){if(M<58140)throw f=Gl2(r),t=K9(s),_=jr2(f^=t),f=Mw2(t);f=u92(t),t=bi(f),s=RZ(f=t==f),f=wW(u),t=e[i++],f?w=10:i+=t}else f=CY(r),t=qv1(s),_=sJ(f>>=t),f=wF(r),t=k82(t),f=fZ2(f<<=t);else if(M<58440)if(M<58361)t=M<58256?(f=Tp2(_),t=zB(w),u=Ui1(f=t<f),f=LP1(r),t=SX(r),WS(f=t<f)):(f=cZ(t),li1(h));else if(M<58362)f=xv1(r),t=pi2(r),t=oL2(f=t<f),f=e[i++],t=S82(constantPool[f]);else{f=cZ2(r),t=sT(s),_=L92(f>>=t),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else 
M<58639?M<58441?(t=Fn([]),f=mt2(u),t=zh(v),d=t9(f|=t)):(f=Tr1(s),t=QP2(_),u=uM(f-=t),f=LQ2(l=z)):(_=fy(t),t=Mg(-_),f=Rv(s),t=SB(f),t=Al2(f=t<=f));else if(M<59182)if(M<58960)if(M<58854)if(M<58767){if(M<58736)return f=U0(t),t=iJ2(f),void(f=xq2(f=t-f));f=_M2(t),t=Yk(f),s=hg2(f=t==f),f=U31(t),t=b11(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=nw2(f)}else if(M<58768){f=ah1(t),t=zQ(f),f=t==f,s=kR(f);{for(f=e[i++],r=b32(_),g=[],t=0;t<f;t++)g.splice(0,0,_h1(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=fi(s)}}else t=kt(h),f=dz2(t),t=C81(f),s=aF2(f=t==f);else if(M<58928)if(M<58855){{f=Jw(t),t=a0(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=xP(f)}t=DS2([])}else f=cA2(r),t=cz2(f),f=Rd(f=f in t),f=SO(_),t=uW2(u),v=g_2(f*=t);else f=L41(r),t=Jj2(s),f=u3(_=a02(f>>=t)),t=ax(w),u=Zh(f=t<f);else if(M<59088)M<59020?M<58961?(f=Wj(t),f=e[i++],t=O61(constantPool[f])):(f=Jk(r),t=vk(s),_=Za2(f>>=t),f=aK(r),t=Pl(s),_=e4(f^=t)):M<59021?(f=$2(t),t=CZ(f),f=Qe(f=t-f),_=Rl2(f),t=Cc2(void 0)):(f=zl2(u),t=e[i++],f?w=10:i+=t,f=Ou1(s),t=$t(_),u=Ry1(f-=t));else if(M<59137)if(M<59089){for(z.for_in_xh_cbb_list=p,f=e[i++],r=H$(_),g=[],t=0;t<f;t++)g.splice(0,0,RE(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=rI2(s),f=Xa(r),t=rm(t),f=eN(f<<=t)}else _=WQ2(r),f=NG2(typeof _),f=tU(u),t=dF2(v),d=dw1(f|=t);else f=jT2(),t=ag2(),gn1(f=t!=f),f=G5(t),t=$l2(r),null!=(r=YI(s)).variablePool?n(r,t,f):r[t]=f,f=HZ2(r);else if(M<59566)if(M<59324)if(M<59201){if(!(M<59183))throw f=j21(t);f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=ht2(f),f=v42(),t=yV(),Bi1(f=t!=f)}else 
if(M<59202)f=fu(w),r=e[i++],f[constantPool[r]]+=1,f=FW2(_),t=j72(w),u=$4(f=t<f);else{for(f=r6(s),t=$7(_),u=Gu1(f-=t),f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,LZ2()):s.splice(0,0,if1());for(f=K72(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}else if(M<59557)if(M<59325){for(f=XT2(t),t=qQ(f),r=MW(f=t===f),f=e[i++],r=hb1(_),g=[],t=0;t<f;t++)g.splice(0,0,IA2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=sK2(s)}else f=UY(u),t=xb1(v),d=Fd2(f|=t),f=c61(u),t=NP1(f),t=Db(f>>>=t);else f=oc2(z),t=uE2(f),_=ai(f=f<=t),f=rE({});else if(M<59830)M<59646?t=M<59567?(f=rJ2(t),t=q(r),r=Ra2(f&=t),wh(h)):(f=BB(_),t=e[i++],f?(i+=t,v=TI(f)):w=10,_=Ri2(f),Gx2(void 0)):s=M<59647?(t=C4(h),iw1(!(_=l31(r)))):(f=ov(r),t=or1(f),f=mz(f=f in t),f=yF(t),t=jY2(f),pB(f=t==f));else if(M<59843)if(M<59831)f=e[i++],t=AR(constantPool[f]),f=w72(s),t=tC(f),t=WL(f=t<=f);else{f=V92(t),t=Sy(f),r=tm(f/=t),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,LN2()):s.splice(0,0,ke2());for(f=AN2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}f=C5(r),t=G7(f),f=wN(f=f<t)}else if(M<61663)if(M<60930)if(M<60637)if(M<60421)if(M<60242)M<60039?(f=e[i++],i+=f,f=as1(u),t=LQ(v),d=Rw2(f|=t)):(f=ZA2(t),t=$e1(f),f=ZV(f+=t),f=iM2(r),t=Pc(s),null!=(r=Kh2(_)).variablePool?n(r,t,f):r[t]=f);else 
if(M<60243){for(f in p=[],t=oP2(t))p.push(f);z.for_in_xh_cbb_list=p,f=tP1(t),t=yE2(f),r=Fh1(f=t===f)}else f=ia1(t),t=fT(f),r=vZ(f/=t),f=Ez(t);else if(M<60605)if(M<60422)f=XQ(z),t=hO(f),_=K61(f=f<=t),f=aQ(s),t=e7(_),u=Ho(f-=t);else for(f=zL2(t),t=Qb(f),f=$S2(f=t-f),d=c.CFf,f=0;f<d;f++){let c=v=FV(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else M<60606?(f=F02(r),t=Gd2(f),f=Ym(f=f<t),f=e[i++],t=UE(constantPool[f])):(f=AU(r),t=gM(s),_=QW2(f^=t),_=xn2(r),f=Rp2(~_));else if(M<60798)if(M<60754)if(M<60638)f=pI(t),t=lL(r),null!=(r=iD(s)).variablePool?n(r,t,f):r[t]=f,f=xt(r),t=kk2([]);else{{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,Gv1()):s.splice(0,0,Ok2());for(f=ol2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}f=hP(t)}else if(M<60755){{for(f in p=[],t=$l1(t))p.push(f);z.for_in_xh_cbb_list=p}f=As2(r),t=f92(f),f=f<t,f=pE(f)}else f=RW2(z),t=T61(f),_=lM2(f=f<=t),f=e[i++],i+=f;else M<60859?f=M<60799?(f=qB2(r),t=bZ(s),_=h51(f%=t),f=Bl1(r),t=KC2(f),o5(r=delete t[f])):(f=Oh1(t),t=Sg2(r),f=dO2(r=G41(f&=t)),t=Zp(f),ve1(f=f<t)):(f=gU2(r),t=oy(s),_=au(f>>=t),f=Sm2(r),t=g42(s),_=yQ(f%=t));else 
if(M<61242)M<61159?M<61091?M<60931?(f=FG(t),t=Tx(r),r=xt1(f&=t),_=A2(t),t=E21(-_)):(f=Xl1(_),t=sn2(w),u=Ly(f=t<f),f=lj(s),t=K0(_),u=OZ(f-=t)):(M<61092?(f=q01(r),t=Bs1(t),f=Zf2(f<<=t),function(){f=W3(t),t=I82(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=M$2(f)}):(f=_u1(_),t=Id(u),f=AH2(f=t!==f),function(){for(z.for_in_xh_cbb_list=p,f=e[i++],r=K(_),g=[],t=0;t<f;t++)g.splice(0,0,Ce1(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=hF(s)}))():M<61241?M<61160?(f=D$(u),t=sO2(v),d=XQ2(f|=t),t=F8([])):(_=ME(r),s=wc(!_),f=qv2(t),t=hL(f),f=bg(f=t-f)):(f=Im2(t),t=Yr(f),f=Uo1(f=t-f),t=$n([]));else if(M<61479)M<61349?M<61243?(f=O7(_),t=g72(u),f=CL(f=t!==f),f=Kt1(r),t=oS2(r),t=b51(f=t<f)):(f=oz2(r),t=AJ2(s),null!=(r=Q91(_)).variablePool?n(r,t,f):r[t]=f,_=jv2(r),s=_61(!_)):f=(M<61350?(f=mM2(u),t=ln2(f),f=(t=hL2(f=Qp2(t=jR(f>>>=t))))-f,md):(f=o82({}),f=g81(r),t=l62(t),f<<=t,KZ2))(f);else if(M<61609)if(M<61480)f=iC(s),t=pi1(f),t=sn1(f=Pz(t=l72(f=t<=f))),r=YS2(f=t===f);else{t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=Cn1(s),t=CC(_),u=fF(f-=t)}else{for(f=zT2(r),t=QI2(t),f=df1(f<<=t),f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,ub()):s.splice(0,0,Nb());for(f=FP1(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}else if(M<62564)if(M<62205)if(M<61775)if(M<61744)if(M<61664){for(f=l71(t),t=BF(f),f=pr(f+=t),f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,PK2()):s.splice(0,0,d31());for(f=TK(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}else 
f=eo2(t),t=XE(f),f=B41(s=AP2(f=t==f)),t=iP2(f),t=_w(f=t<=f);else M<61745?(f=A4(r),t=qs2(f),f=W1(f=f<t),f=mW(t),t=Gg2(r),null!=(r=GO(s)).variablePool?n(r,t,f):r[t]=f,f=qF(r)):(f=BL2(l=z),f=VC(r),t=Ut(s),_=dU2(f>>=t));else M<62107?M<61776?(f=we2({}),_=Pw1(r),f=vF(~_)):(f=xy(t),t=Zy(f),f=x32(f=t-f),f=oi1(_),t=E62(u),v=H_2(f*=t)):(f=Tm2(r),t=c_(f),f=qg2(f=f in t),f=_G(),t=Sg(),L_2(f=t!=f));else M<62328?M<62226?M<62206?(f=Tv1(r),(t=jh2(f)).push(f),f=mD(t),_=Y4(r),s=x81(!_)):f=_7(t):M<62227?(f=J9(r),t=wX(s),_=hc(f>>=t),f=BP(s),t=Nr1(_),u=nX2(f-=t)):(f=oO2(r),t=OO(s),_=PP(f%=t),t=Hp([])):M<62435?M<62329?(f=WZ(t),t=ft2(f),r=Rw1(f=t===f),f=FB2(s),t=Lz(_),u=_n1(f-=t)):(f=rh(r),r=e[i++],--f[constantPool[r]],f=cs1(t)):(f=mH(_),t=I9(w),u=s61(f=t<f),f=AF2(s),t=Fi(_),u=aK2(f-=t));else if(M<62917)if(M<62733)M<62708?M<62565?(_=cs(r),s=R1(!_),f=hZ(r),r=e[i++],--f[constantPool[r]]):(f=gW2(z),t=eQ(f),_=W81(f=f<=t),f=e[i++],t=p22(constantPool[f])):M<62709?(f=TF2(r),t=ra2(s),_=sE(f%=t),t=Hs([])):(f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=G92(f),f=qg(t),t=Ft1(r),r=gs(f&=t));else{if(!(M<62850))throw f=tb(r),t=m6(r),t=fy2(f=t<f),f=VG(t);M<62734?(f=Ws({}),f=Pu(u),t=Tg2(v),d=rp(f|=t)):(t=eX([]),t=d$2(h))}else if(M<63186)M<63019?M<62918?(f=Z11(z),t=xd1(f),_=T_(f=f<=t),f=MF2(t),t=xd2(f),r=vQ(f/=t)):(f=A_2(t),r=e[i++],f?i+=r:w=9,f=vw(z),t=oR(f),_=sj(f=f<=t)):M<63020?(f=VX2(t),f=Nb1(r),t=Tb1(s),_=bG(f^=t)):(f=e[i++],i+=f,f=s32(t),t=FD2(f),f=EP2(f+=t));else if(M<63274)M<63187?(f=kF(t),t=Hn2(f),r=Ko1(f=t===f),f=nE2(t),t=GB2(f),r=od1(f/=t)):(_=S9(r),s=yl(!_),f=_A2(),t=Ng(),Ee1(f=t!=f));else{t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return 
u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=PD(_),t=sb2(u),v=uu(f*=t)}else if(M<68835)if(M<66491)if(M<64895)if(M<64258)if(M<63903)if(M<63517)if(M<63441)if(M<63356)f=e92(),t=x12(),ma1(f=t!=f),f=ru1(r),t=UG2(s),_=dk(f%=t);else{for(f=MG(t),t=vf1(f),r=N$2(f/=t),f=e[i++],r=zP1(_),g=[],t=0;t<f;t++)g.splice(0,0,Kp(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=Xb1(s)}else M<63442?(f=bA(r),t=GS(f),f=Xn(f=f in t),f=Y91(r),t=VO2(s),_=T81(f>>=t)):(f=s$(t),t=Dp2(f),f=OT2(r=I42(f=t===f)),t=H22(f),r=delete t[f],f=eS2(r));else M<63557?M<63518?(f=v62(r),r=e[i++],--f[constantPool[r]],f=N42(_),t=r02(u),v=Dl(f*=t)):(f=nO2(l=z),f=Sv2(s),t=e22(_),u=CH(f-=t)):M<63558?(f=Ge2(u),t=z02(f),t=Vn(f>>>=t),f=nh(r),t=Ce2(s),_=mq2(f%=t)):(f=e[i++],t=OB(f),f=YX2(u),t=uC(f),t=Bp2(f>>>=t));else if(M<64147)if(M<64106)M<63904?(f=ZQ(s),t=iM(f),t=_4(f=t<=f),f=Hv1({})):(f=rx2(s),t=qn1(f),t=X21(f=t<=f),f=JG(r),t=fm(s),null!=(r=$c(_)).variablePool?n(r,t,f):r[t]=f);else if(M<64107){for(d=c.CFf,f=0;f<d;f++){let c=v=E61(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=zi(r),t=Vy(s),_=lH2(f%=t)}else f=gp(_),t=_t1(u),v=nU(f*=t),f=wp(u),t=uh2(v),d=xP1(f|=t);else 
M<64189?f=(M<64148?(f=YR2(r),t=KC(r),t=Xv1(f=t<f),l=z,XO):(_=ah(r),s=Qo1(!_),l=z,in2))(z):(f=AQ(_),t=C2(w),u=Vl(f=t<f),f=e[i++],t=Nj2(r),r=Bc(f),t[constantPool[f]]=r);else if(M<64628)if(M<64476)if(M<64277){if(M<64259)throw f=iL(t);f=os1(u),t=_A(f),t=sc(f>>>=t),f=gN2(_),t=kB(u),v=$r1(f*=t)}else if(M<64278)f=$K(),t=K51(),wL2(f=t!=f),f=V61(l=z);else{for(f in f=rZ2(r),t=E91(t),f=GP1(f<<=t),p=[],t=c0(t))p.push(f);z.for_in_xh_cbb_list=p}else M<64542?M<64477?(f=o72(s),t=Pi1(f),t=kT2(f=t<=f),f=e[i++],i+=f):(_=Ek2(r),s=HY(!_),f=e[i++],t=PV(constantPool[f])):(f=fN2(t),f=v92(r),t=B_(s),_=Ko(f>>=t));else M<64762?M<64707?M<64629?(f=nz(_),t=Hh(w),u=UR(f=t<f)):(_=RN2(r),s=BR(!_),f=MX(r),t=q3(t),f=g41(f<<=t)):M<64708?(f=lh1(_),t=HY2(w),u=Np2(f=t<f),f=o21(r),t=$P(t),f=wj(f<<=t)):(f=HH(t),t=_C2(f),f=Vt(f=t-f),f=Cl2(t),t=bi1(f),r=j4(f=t===f)):M<64879?M<64763?(f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=Mu2(f),f=P5(t),t=jK2(r),r=Kd1(f&=t)):(f=uh(_),t=e[i++],f?(i+=t,v=zJ(f)):w=10,t=Ed([])):(f=$e2(s),t=Ny1(f),f=vJ(t=La2(f=t<=f)),t=sp(r),null!=(r=Mr1(s)).variablePool?n(r,t,f):r[t]=f,f=VW(r));else if(M<65831)M<65407?M<65025?M<64954?M<64896?(f=lk(r),t=sc1(f),f=VX(f=f<t),_=Uk2(r),f=sX(typeof _)):(f=xL(r),r=e[i++],--f[constantPool[r]],f=De1(z),t=M5(f),_=Yu1(f=f<=t)):M<64955?(_=wA2(f),t=F32(void 0),f=e[i++],t=ab2(constantPool[f])):(_=O72(r),s=Fd1(!_)):M<65112?M<65026?(f=PQ(u),t=Sl2(f),t=iK(f>>>=t),f=FD(u),t=e[i++],f?w=10:i+=t):(f=Tn1(z),t=lK2(f),_=rZ(f=f<=t),f=tl(r),t=mT2(f),f=wm2(f=f<t)):M<65113?(f=Bq2(s),t=Zf1(_),u=kz2(f-=t),f=kL(_),t=g1(w),u=KD(f=t<f)):(f=Vm(s),t=PZ2(_),u=uf1(f-=t),f=LC(t),t=Su1(f),r=fh2(f=t===f)):M<65680?M<65501?M<65408?(f=II(r),t=ed2(f),f=ZY2(f=f in t),f=mS(z),t=V12(f),_=Sx2(f=f<=t)):(f=n11(t),t=HN(f),r=Ba(f=t===f),t=lU2(h)):t=(M<65502?(f=Tu(t),t=FT2(f),f=E92(f+=t),f=n3(u),t=TQ(f),f>>>=t,aN2):(f=Kz2(t),t=Ix2(f),r=Gh(f/=t),f=vp2(s),t=fh(f),f=t<=f,W6))(f):M<65802?f=(M<65681?(_=lG2(r),f=b81(typeof 
_),f=pf2(t),t=ci1(f),f+=t,Sd2):(f=MQ2(t),t=YT(s),v=Hk(f=f instanceof t),f=g61(t),t=W_2(f),f+=t,F_1))(f):(f=e[i++],t=N72(r),r=Vs2(f),t[constantPool[f]]=r);else if(M<66095)if(M<65991)if(M<65883)if(M<65832)f=Qy1(z),t=qW(f),_=Uj2(f=f<=t),f=bE2(t),t=Fn1(f),f=CM2(f+=t);else for(f=gf2(t),t=Dm(f),f=Nv2(f=t-f),d=c.CFf,f=0;f<d;f++){let c=v=E_1(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else M<65884?(f=Ue2({}),f=jk2(_),t=R72(u),v=Ao1(f*=t)):(f=GF(t),t=BH(f),r=H9(f/=t),f=e[i++],t=pt(r),r=MN(f),t[constantPool[f]]=r);else M<66081?f=M<65992?(_=bt2(r),s=Lc1(!_),z01(t)):(f=pD2(r),t=cT2(f),n21(f=f in t)):(_=fu1(r),f=eu(~_),f=cg2(r),t=_q2(s),_=ki2(f^=t));else if(M<66353)M<66285?M<66096?(f=Zq2(_),t=sU(r),null!=(r=OC(f)).variablePool?n(r,t,f):r[t]=f,f=At2(t),t=st2(f),f=Tk(f=t-f)):(f=C01({}),_=Jc(t),t=mU(-_)):M<66286?(_=I5(f),t=VO(void 0),f=P32(r),t=fv2(s),_=fg2(f^=t)):(f=CT2(s),t=Li1(f),t=k6(f=t<=f),f=rt2(_),t=PY(u),f=so1(f=t!==f));else if(M<66392){if(M<66354)return f=$T2(r),t=gq(t),void(f=fO(f<<=t));f=P_2(t),t=oF(f),f=uk2(f+=t),f=oW2(t),t=Lb(f),f=jx2(f=t-f)}else f=zn(r),t=k91(s),_=e5(f>>=t),_=jp(f),t=dp2(void 0);else if(M<67619)if(M<66888)if(M<66682)if(M<66559)M<66542?s=(M<66492?(f=kB2(r),t=ZF2(t),f=KU(f<<=t),f=pP1(t),t=FZ2(f),f=t==f,Am2):(f=zp(z),t=ks2(f),_=dV(f=f<=t),f=yx(t),t=OW2(f),f=t==f,$N2))(f):M<66543?(f=ti2(r),t=Aa1(t),f=fU2(f<<=t),f=H72(t)):(f=Fv2(t),r=e[i++],f?i+=r:w=9,f=Ln1(t),t=EC(r),r=wl1(f&=t));else 
if(M<66627)M<66560?(_=uP1(r),f=TJ(~_),f=GT2(r),t=c7(s),_=yL2(f^=t)):(f=In(t),f=Ox(r),(t=jb2(f)).push(f),f=zv(t));else{if(!(M<66628))throw f=Z91(r),t=UD(r),t=U82(f=t<f),f=M21(t);f=Hc(),t=uV2(),Ep(f=t!=f),f=io(u),t=e[i++],f?w=10:i+=t}else if(M<66731)M<66706?f=M<66683?(f=b92(t),t=e$2(f),f=$61(f=t-f),lO(t)):(f=hX(r),t=eS(s),null!=(r=xg2(_)).variablePool?n(r,t,f):r[t]=f,f=Py2(r),t=jo2(f),RJ(f=f<t)):M<66707?(f=jo1(r),t=e[i++],f?w=10:(i+=t,s=$42(f)),t=f31(h)):(_=EW2(r),f=uF(typeof _),f=xu1(_),t=$G(u),f=It2(f=t!==f));else if(M<66887)M<66732?(f=Ct2(_),t=gD(u),v=vb1(f*=t),f=e[i++],t=Uu2(f)):(f=_62(u),t=YP2(f),t=K4(f>>>=t),f=zd2(u),t=C9(v),d=v_1(f|=t));else{for(d=c.CFf,f=0;f<d;f++){let c=v=la1(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=Lo2(r),t=SF(s),_=Ff1(f^=t)}else if(M<67157)if(M<67057)if(M<66932)M<66889?(f=nS2(t),t=xX2(f),f=M_(s=zM(f=t==f)),t=wI(f),t=l9(f=t<=f)):(f=oM2(t),t=DQ(r),f=Rs(r=bn(f&=t)),t=hT2(s),null!=(r=yn(_)).variablePool?n(r,t,f):r[t]=f);else if(M<66933){for(d=c.CFf,f=0;f<d;f++){let c=v=io1(f);l[c]=function(){var b=new 
cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=o1(t),t=$B(r),r=yg2(f&=t)}else f=e2(r),t=R82(t),f=Qf2(f<<=t),f=v9(t),t=_F(f),s=Vu1(f=t==f);else M<67127?M<67058?(_=$A2(f),t=cv(void 0),t=Sc2([])):(_=nD2(r),s=_82(!_),f=sz2(t),t=EW(f),r=IH(f=t===f)):(f=fn2(t),t=zX(f),f=GU(r=cG2(f=t===f)),t=JT(f),f=uv(f=f<t));else if(M<67404)if(M<67203)M<67158?(f=s42(_),t=e[i++],f?(i+=t,v=DO(f)):w=10,f=Za1(t),t=h_(r),r=ir1(f&=t)):(f=S71({}),f=_R2(l=z));else if(M<67204)_=B21(t),t=lm(-_),f=KZ({});else for(f=MK2(z),t=Bc1(f),_=FT(f=f<=t),f=c.CFf,t=0;t<f;t++)w=Lj2(t),z[w]=o[t];else M<67541?M<67405?(f=e[i++],t=d_1(constantPool[f]),f=Ta2(t),t=Gz2(f),r=O12(f/=t)):(f=e[i++],t=A1(f),f=rv1(u),t=Uq(f),t=T12(f>>>=t)):(f=zF2(r),t=Ni(s),_=xI(f%=t),f=r22(u),t=e[i++],f?w=10:i+=t);else if(M<68139)if(M<67806)if(M<67718)if(M<67703)M<67620?(t=Gb2(h),f=ia2(s),t=Ow(_),u=El(f-=t)):(t=hl2(h),_=Ya1(r),f=FX2(~_));else{if(!(M<67704))return f=kc(_),t=Pb2(u),void(v=cx2(f*=t));f=n5(r),t=F22(s),f=MR(_=$12(f>>=t)),t=YE2(w),u=Q72(f=t<f)}else M<67805?M<67719?(t=d6([]),f=RO(_),t=QX(u),v=CB2(f*=t)):(_=xG(r),s=ue2(!_),f=u_(t),r=e[i++],f?i+=r:w=9):(f=Ec(t),t=le2(f),f=p42(f+=t),_=dc1(r),s=OE2(!_));else M<68073?M<67974?M<67807?(f=L3(r),t=wV2(r),t=nJ2(f=t<f),f=e[i++],i+=f):(f=x51(t),t=jc2(f),r=he(f/=t),f=Qv1(u),t=fc2(f),t=eR(f>>>=t)):t=M<67975?(f=qE2(t),t=PC2(s),v=$21(f=f instanceof t),_=vC2(f),Ox2(void 
0)):(f=KX2({}),f=wl(u),t=cl1(f),fk(f>>>=t)):M<68134?M<68074?(f=ZH(t),t=mO(r),f=KN2(r=hA(f&=t)),t=wP(r),t=vw2(f=t<f)):(f=Fl1(t),t=Nv(f),s=my2(f=t==f),f=sO(t)):(f=Rc1(z),t=ev2(f),f=B0(_=s62(f=f<=t)),t=Uo(u),f=HP2(f=t!==f));else if(M<68365)if(M<68244)if(M<68180)if(M<68140)f=P_1(t),t=Og(f),f=QJ(r=Sw2(f=t===f)),r=e[i++],--f[constantPool[r]];else{for(d=c.CFf,f=0;f<d;f++){let c=v=rs1(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=Hf2({})}else if(M<68181)t=zO(h),f=pq2(t),r=e[i++],f?i+=r:w=9;else{f=IC({});{for(f=e[i++],r=I11(_),g=[],t=0;t<f;t++)g.splice(0,0,_D(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=Eo1(s)}}else if(M<68343)if(M<68245)f=lI2(r),t=pa(t),f=bk(f<<=t),f=rM2(r),t=El2(s),_=K32(f^=t);else{for(d=c.CFf,f=0;f<d;f++){let c=v=rW2(f);l[c]=function(){var b=new 
cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=pb1(t)}else f=I_1(l=z),f=ll2(r),t=he1(s),_=Fw1(f^=t);else if(M<68683)if(M<68477)if(M<68366)throw f=ji(t);else f=nL2(z),t=k51(f),_=li2(f=f<=t),f=EE2(z),t=jS2(f),_=W4(f=f<=t);else M<68478?(f=dY(u),t=e[i++],f?w=10:i+=t,f=LB(_),t=d92(u),v=Yg(f*=t)):(f=T41(t),t=oI2(f),f=Tc1(f+=t),f=lZ(t),t=Gr(f),s=dv2(f=t==f));else M<68833?M<68684?(f=sQ(t),t=Ev1(f),f=Gn2(r=jB(f/=t)),t=o4(s),_=XM(f%=t)):(_=Co1(t),t=lg2(-_),f=PM(t),t=AO(f),s=Rl(f=t==f)):(f=j12(s),t=R52(f),t=vQ2(f=t<=f),f=Sx(u),t=$_1(f),t=TN(f>>>=t));else if(M<71658)if(M<70130)if(M<69645)M<69186?M<69020?M<68885?M<68836?(f=A92(s),t=A31(_),u=nu2(f-=t),f=MI2(r),t=G22(t),f=kz(f<<=t)):(f=cF2(r),t=kc2(s),_=Gt(f^=t),f=ck(u),t=ti1(v),d=Qw(f|=t)):M<68886?(f=xj2(z),t=Mv(f),_=Qs(f=f<=t),f=$m2(r),t=VP(s),_=$i2(f^=t)):(f=WC(r),t=la2(f),f=Um2(f=f<t),f=_e2(r),r=e[i++],--f[constantPool[r]]):M<69064?M<69021?(f=HA2(_),t=q51(w),u=QM2(f=t<f),_=nG2(r),f=IY2(~_)):(f=l$2(w),r=e[i++],f[constantPool[r]]+=1,f=B11(r),t=bu1(s),_=im2(f%=t)):M<69065?(f=Ml1(t),t=uT(f),r=yJ2(f/=t),f=SE(t),t=Eq(s),v=Ry(f=f instanceof t)):(f=Bu(u),t=Kr1(v),d=pv1(f|=t),_=Mb(r),f=n6(typeof 
_)):M<69508?M<69323?M<69187?(f=ep(t),t=TZ(f),f=lQ(r=sX2(f/=t)),t=yu2(f),f=Vo2(f=f<t)):(f=q8(t),f=l21(u),t=XZ(v),d=c82(f|=t)):M<69324?(f=dt2(s),t=f$(_),u=Xt(f-=t),f=Rh2(),t=xQ(),Eh(f=t!=f)):(f=oj2(r),(t=WV(f)).push(f),f=nK(t),f=Bh(u),t=sS(f),t=zb(f>>>=t)):M<69624?M<69509?(f=bn2(r),t=kH(s),_=m$(f^=t),f=du1(w),r=e[i++],f[constantPool[r]]+=1):(f=Kt(t),f=qJ2(r),t=Q41(f),r=delete t[f],f=q_1(r)):(t=Xu(h),f=fE2(u),t=Rn1(v),d=ES(f|=t));else if(M<69859)if(M<69690)if(M<69668)if(M<69646){for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,ON()):s.splice(0,0,cH2());for(f=eC(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}f=y6(),t=xh(),s4(f=t!=f)}else f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=If(f),_=bF(f),t=ii2(void 0);else M<69669?(f=RU2(r),t=Rt2(f),f=mP2(f=f<t),f=nL(_),t=Fa1(u),v=PQ2(f*=t)):(f=ea(r),t=e[i++],f?w=10:(i+=t,s=ut2(f)));else M<69735?M<69691?(f=tB2(t),t=qO(r),r=IN(f&=t),f=UB2(_),t=d$(u),f=DZ(f=t!==f)):(f=yK(t),t=Qc2(f),r=cN(f=t===f),f=h7(s),t=Pt1(f),t=h42(f=t<=f)):(f=V81(r),t=JU2(s),_=yn2(f>>=t));else if(M<70010)if(M<69930)M<69860?(t=HB(h),f=bw(r),t=G_2(f),f=ht(f=f<t)):(f=ei2(_),t=I(u),v=$L(f*=t),f=Y32(u),t=gr(v),d=dl1(f|=t));else if(M<69931)f=Pm(r),t=U72(s),_=jJ(f>>=t),f=r8(w),r=e[i++],f[constantPool[r]]+=1;else{{for(f=e[i++],r=xa(_),g=[],t=0;t<f;t++)g.splice(0,0,qG(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=fP(s)}f=N_1(_),t=xM(u),f*=t,v=hr(f)}else M<70070?M<70011?(f=i31(r),t=mu2(t),f=Wn(f<<=t),f=$j(r),t=UA2(s),_=Rg(f%=t)):(f=vF2(t),t=HV(f),f=mJ(f+=t),f=ez(r),(t=VK2(f)).push(f),f=q2(t)):(f=wt1(s),t=qV(f),t=tb2(f=hw(t=c_1(f=t<=f))),s=n_1(f=t==f));else if(M<71028)if(M<70676)if(M<70312)if(M<70265){if(M<70131)return f=sa(r),t=It(f),void(f=uq2(f=f<t));for(f=c.CFf,t=0;t<f;t++)w=tE(t),z[w]=o[t];f=ro1(t),t=ry2(r),r=BF2(f&=t)}else 
M<70266?(f=sd1(t),t=i7(h)):(t=ae([]),f=q5(t),r=e[i++],f?i+=r:w=9);else if(M<70602)f=M<70313?(f=m52(z),t=ve(f),_=s_(f=f<=t),G_1(t)):(f=JR(_),t=KO(u),f=rw(f=t!==f),f=eh1(_),t=n7(u),LG(f=t!==f));else{f=dW2(s),t=dx2(f),f=t<=f,t=iS(f);{for(f in p=[],t=P61(t))p.push(f);z.for_in_xh_cbb_list=p}}else if(M<70936)if(M<70757)if(M<70677)for(f=YX(u),t=j62(f),t=ge2(f>>>=t),d=c.CFf,f=0;f<d;f++){let c=v=SA(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else f=AJ(t),t=v82(r),r=VI(f&=t),f=KS2(s),t=Ts(_),u=rt1(f-=t);else M<70758?(f=wJ2(t),t=S31(f),r=yY2(f=t===f),f=X52(),t=Lf2(),ad1(f=t!=f)):(f=Ro(u),t=e[i++],f?w=10:i+=t,f=gh1(_),t=tr2(w),u=DS(f=t<f));else M<70972?M<70937?(f=g01(_),t=iF(w),u=cd2(f=t<f)):(f=G8(r),t=u11(s),_=yu1(f>>=t),f=qI(r),t=Sz(r),t=Dv(f=t<f)):(f=x41(u),t=E3(v),d=QO2(f|=t),f=xT2(t),t=Nt1(f),s=uy2(f=t==f));else if(M<71306)M<71136?M<71093?M<71029?(f=O4(r),t=UQ2(f),f=ut1(f=f<t),f=uA(r),t=w02(s),_=fG(f^=t)):(_=mZ(f),t=dw(void 0),f=Wr(t),t=Rq(f),r=NV2(f/=t)):M<71094?(f=wr2(z),t=w31(f),_=j92(f=f<=t),f=Ie1(t),t=Mi2(r),null!=(r=Ub(s)).variablePool?n(r,t,f):r[t]=f,f=P81(r)):(f=pp2(r),t=a21(s),null!=(r=eE2(_)).variablePool?n(r,t,f):r[t]=f,f=td1(r),t=Xr1(s),_=RZ2(f^=t)):M<71268?M<71137?(f=s31(t),t=hj(f),r=z31(f/=t),f=tE2(t),t=k72(f),s=LA(f=t==f)):(f=sy1(t),t=CS(f),r=VJ(f/=t),_=w5(t),t=MU(-_)):(f=ZQ2(t),t=S62(f),s=MX2(f=t==f),_=l01(r),s=b42(!_));else 
if(M<71527)if(M<71341)if(M<71307)f=c1(r),r=e[i++],--f[constantPool[r]],f=z2(t),t=Ks2(f),f=Nt2(f+=t);else{f=VI2(s),t=B12(f),f=t<=f,t=lp(f);for(f=c.CFf,t=0;t<f;t++)w=dr(t),z[w]=o[t]}else if(M<71342)f=Bw2(r),(t=Kp2(f)).push(f),f=Pe(t),f=L52(_),t=Rr1(w),u=B6(f=t<f);else{f=RW(t),t=xc(f),f=t-f,f=A01(f);{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,iQ2()):s.splice(0,0,GY());for(f=S61(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}}else M<71624?M<71528?(f=nA2(r),t=b02(s),_=Fr1(f>>=t),t=RE2([])):(f=iY2(t),t=$s2(r),null!=(r=W41(s)).variablePool?n(r,t,f):r[t]=f,f=tr(r),f=XX2(r),t=H11(s),_=P41(f%=t)):(t=C41(h),f=Q51(t),t=iA(f),s=pu(f=t==f));else if(M<73566)if(M<72563)if(M<72186)if(M<71817)if(M<71718)if(M<71659){f=wq(t),t=Uz2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=I41(f),f=H$2(t),t=bt1(f),f=rr1(f=t-f)}else f=zc1(r),t=kL2(f),r=delete t[f],f=Wd1(r),f=tZ2(r),t=y8(r),t=rL(f=t<f);else if(M<71719){t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=_C(_),t=Q$2(u),f=by1(f=t!==f)}else{f=Kd2(t),t=eB2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=mi(f),t=oe([])}else M<72115?M<71818?(_=Sj2(f),t=GX(void 0),f=Rg2(r),t=vu2(f),f=Bj2(f=f in t)):(f=US(s),t=cW2(_),u=$1(f-=t),_=u_2(f),t=Da(void 0)):u=(M<72116?(function(){for(f=c.CFf,t=0;t<f;t++)w=A22(t),z[w]=o[t]}(),f=D5(_),t=Ty(w),f=t<f,Qd):(f=yG2(),t=L1(),WH2(f=t!=f),f=zr1(_),t=te(w),f=t<f,Ki2))(f);else if(M<72389)if(M<72238)if(M<72187)f=F_({}),_=Zo(r),f=Ft2(typeof 
_);else{{for(f=e[i++],r=Km2(_),g=[],t=0;t<f;t++)g.splice(0,0,G51(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=y9(s)}f=d82(r),t=g62(s),f^=t,_=RP(f)}else if(M<72239){for(f=c.CFf,t=0;t<f;t++)w=UA(t),z[w]=o[t];f=DK(r),t=H32(r),f=t<f,t=Bv2(f)}else f=sw1(r),t=B51(r),t=Wz(f=t<f),f=Io2(s),t=Ew1(_),u=lz2(f-=t);else if(M<72451)if(M<72390){f=Jb2(t),t=mj2(f),f=t-f,f=F61(f);{f=MC(t),t=Wb1(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=Qz2(f)}}else f=ej(r),t=kw(r),t=Kl2(f=t<f),t=eD2(h);else f=Le1(_),t=nf(w),u=Gp2(f=t<f),f=Up2(r),t=Mh2(f),f=eP2(f=f in t);else if(M<72871)if(M<72608)if(M<72583)M<72564?(f=jG(t),t=eU2(r),null!=(r=ij(s)).variablePool?n(r,t,f):r[t]=f,f=Kg2(r),t=jz2([])):(t=vh2([]),f=PR2(w),r=e[i++],f[constantPool[r]]+=1);else if(M<72584){f=J7(t),t=oA2(f),r=Jd1(f/=t),f=Iv1(t),t=cB2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=sh(f)}else f=yO2(t),f=Z21(t),t=Yd2(f),f=bo1(f=t-f);else M<72791?M<72609?(f=E$(z),t=$Z2(f),_=p_1(f=f<=t),_=Lt2(f),t=Gi2(void 0)):(_=ha1(r),s=LI(!_),f=F9(l=z)):(f=vV(r),t=kv(f),f=V4(f=f<t));else if(M<73109)if(M<73062){if(M<72872)throw f=GK2(t);f=S2(r),t=jD2(f),r=delete t[f],f=Vr2(r),t=LV(h)}else M<73063?(f=e[i++],i+=f,f=zD(),t=Vp(),O8(f=t!=f)):(f=Kw(s),t=vI2(f),f=Ua(t=B_1(f=t<=f)),r=e[i++],f?i+=r:w=9);else if(M<73397)if(M<73110)_=zE2(r),s=BO(!_),f=g5(t),r=e[i++],f?i+=r:w=9;else{for(f=Bg(t),t=wb(r),r=l8(f&=t),z.for_in_xh_cbb_list=p,f=e[i++],r=j22(_),g=[],t=0;t<f;t++)g.splice(0,0,ki1(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=Ep2(s)}else f=Qh2(t),t=HT(r),r=cF(f&=t),f=wv(u),t=e[i++],f?w=10:i+=t;else if(M<74154)if(M<73911)if(M<73604)if(M<73574)M<73567?(f=RR(_),t=de2(u),f=c11(f=t!==f),f=Sh1(z),t=hY2(f),_=LH(f=f<=t)):(f=N91(z),t=rJ(f),_=IV(f=f<=t),f=FR2(r),t=fY(r),t=gi1(f=t<f));else 
if(M<73575){for(f=_r2(r),t=Rj2(t),f=UO(f<<=t),z.for_in_xh_cbb_list=p,f=e[i++],r=Uf1(_),g=[],t=0;t<f;t++)g.splice(0,0,A6(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=Hd2(s)}else f=ta(z),t=qR(f),_=S91(f=f<=t),f=Ti(t),t=fK2(f),r=fH2(f=t===f);else M<73741?M<73605?(f=I52(s),t=s8(_),u=o81(f-=t),_=IG(f),t=mR2(void 0)):(f=ma(_),t=Qn1(w),u=O_2(f=t<f),y=Xg(f),null!=(P=K41(t)).variablePool?n(P,y,v):P[y]=f):(t=Jo1(h),f=Zz(t),t=D11(f),s=mF(f=t==f));else if(M<74054)M<73961?M<73912?(f=lb2(t),t=gn(f),r=M9(f=t===f),f=Yd(u),t=e[i++],f?w=10:i+=t):(t=lc(h),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=RN(f)):s=(M<73962?(f=Ux(u),t=p62(f),f=(t=zr(f=Ws1(t=wd1(f>>>=t))))==f,F21):(_=wz2(f),t=LE(void 0),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),wy1))(f);else if(M<74112){if(M<74055)return f=a2(z),t=j9(f),void(_=Oe2(f=f<=t));f=Od2(),t=yl1(),Vv2(f=t!=f),f=hc2(z),t=gH2(f),_=m61(f=f<=t)}else f=e[i++],t=gh2(constantPool[f]),f=uP(r),t=OR(s),_=sV2(f>>=t);else if(M<74556)if(M<74306)M<74250?M<74155?(f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=Ov1(f),f=WD(u),t=gk2(v),d=hY(f|=t)):(f=VD(t),f=e[i++],i+=f):M<74251?(t=Ys(h),f=UZ(r),t=Je1(r),t=VF(f=t<f)):(f=y71({}),f=h32(t),t=Xk(s),v=I_2(f=f instanceof t));else if(M<74521)if(M<74307)f=Ny2(r),t=t21(s),_=Ba2(f>>=t),f=YT2(t),t=YF2(f),r=uT2(f=t===f);else{f=vz(u),t=bq(f),t=IN2(f>>>=t),f=II2(t),t=P02(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=KI2(f)}else f=ET2(t),t=Sl(f),f=bx(f=t-f),f=nz2(w),r=e[i++],f[constantPool[r]]+=1;else M<74677?M<74635?M<74557?(f=HL2(r),t=x01(s),_=ka2(f%=t),f=l52(t),t=H_1(f),r=bC2(f/=t)):(f=cX2(t),t=sI2(f),f=qe2(f=t-f),f=Ml2(_),t=P9(w),u=sS2(f=t<f)):M<74636?(f=I8(_),t=fW2(w),u=Bb(f=t<f),f=ro(r),t=Jm2(f),f=h0(f=f<t)):(f=Ay1(r),t=wr1(r),t=$y(f=t<f),_=BA(f),t=mr(void 
0)):M<74720?(M<74678?(f=G9(r),t=fC2(f),f=zY2(f=f<t),function(){for(f=e[i++],r=Wm2(_),g=[],t=0;t<f;t++)g.splice(0,0,An1(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=gi2(s)}):(f=Gv(u),t=AZ(f),t=Qw1(f>>>=t),function(){throw f=XC(t)}))():(f=IK(r),t=e[i++],f?w=10:(i+=t,s=nI(f)),f=$T(u),t=An(v),d=dT(f|=t));else if(M<86949)if(M<80796)if(M<77519)if(M<76204)if(M<75381)if(M<75145)if(M<75009)if(M<74880)if(M<74759)f=yP1(r),t=hi(t),f=Ln(f<<=t),f=vl2(u),t=f41(f),t=nS(f>>>=t);else{f=UC2(_),t=$v2(w),u=pG(f=t<f),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else t=(M<74881?(t=BP1(h),f=D62(s),t=F92(f),f=t<=f,Lc):(f=e[i++],t=DH2(r),r=L$(f),t[constantPool[f]]=r,f=jZ(u),t=iX2(f),f>>>=t,TU))(f);else M<75085?f=M<75010?(f=e[i++],t=D$2(r),r=er1(f),t[constantPool[f]]=r,f=kU2(t),t=Nn(f),Xk2(f+=t)):(f=Do2(_),t=Jl1(u),f=rc2(f=t!==f),bb1(t)):M<75086?(f=Ba1(t),t=Oa(f),s=Tf(f=t==f),f=SR(t),t=tz(f),r=u7(f/=t)):(y=be1(f),null!=(P=o52(t)).variablePool?n(P,y,v):P[y]=f,t=FK2([]));else if(M<75313)if(M<75284)if(M<75146){for(f in f=r21(u),t=YI2(v),d=J92(f|=t),p=[],t=jV2(t))p.push(f);z.for_in_xh_cbb_list=p}else f=_p(r),t=_c2(s),_=Yc(f>>=t),f=ky1(t),t=Fs(f),f=Fj2(f+=t);else M<75285?(f=DQ2(_),t=qh1(u),v=D21(f*=t),f=Ff2(t)):(f=DD(r),t=e[i++],f?w=10:(i+=t,s=tx2(f)),f=zS2(),t=TC2(),qX2(f=t!=f));else 
M<75366?M<75314?(f=S81(s),t=M11(f),t=h_2(f=k5(t=BS2(f=t<=f))),r=M12(f=t===f)):(_=Ce(r),s=gr1(!_),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=lF(f)):(f=IP(r),t=Kr(s),_=XA(f>>=t),f=yn1({}));else if(M<75937)M<75559?M<75467?M<75382?(f=cO2(r),t=sR(s),_=lS2(f>>=t),f=w12(t),t=Q8(f),f=_H(f+=t)):(f=GH(u),t=c72(v),d=gu(f|=t),f=e[i++],t=q$2(r),r=yc2(f),t[constantPool[f]]=r):f=M<75468?(f=BQ(s),t=Od1(f),f=_W(t=w_(f=t<=f)),t=D12(r),null!=(r=Gl(s)).variablePool?n(r,t,f):r[t]=f,QI(r)):(_=Rc2(r),f=wk2(typeof _),ss1({})):M<75919?M<75560?(f=e[i++],t=qr1(r),r=Zn1(f),t[constantPool[f]]=r,_=vH(f),t=s_2(void 0)):(f=ew(r),t=Of(f),f=b12(f=f<t),_=DC2(r),s=Vz(!_)):(_=J8(r),s=CE2(!_),f=Jz2(s),t=RA(_),u=oc(f-=t));else if(M<76035)if(M<76026)if(M<75938)f=_s(u),t=MT2(v),d=uj2(f|=t),f=FZ(_),t=N52(u),v=E1(f*=t);else{{for(f=e[i++],r=j8(_),g=[],t=0;t<f;t++)g.splice(0,0,Ls(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=WP(s)}f=hP1(t),t=tY(f),f+=t,f=XW2(f)}else f=M<76027?(f=V41({}),f=kJ(r),t=jf(f),J_(r=delete t[f])):(f=Sd(_),t=ip(w),u=Sb1(f=t<f),b7({}));else M<76159?M<76036?f=vP2({}):(f=e[i++],i+=f,_=o_(r),s=h8(!_)):(f=e[i++],t=ET(f),f=Hb1(t));else if(M<76745)if(M<76508)if(M<76321)M<76288?M<76205?(f=Vv1(t),t=kC(f),r=VM2(f/=t),f=LJ(l=z)):(f=mz2(t),t=U71(f),f=J61(f+=t),f=NE(r),t=yj2(s),_=EG(f>>=t)):M<76289?(f=qc(s),t=hl1(_),u=Qd2(f-=t),f=EU(t),t=cN2(r),r=Nn2(f&=t)):(_=fq2(r),f=gC2(~_),f=KH2(s),t=kF2(f),t=nU2(f=t<=f));else if(M<76385)if(M<76322){for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,YZ2()):s.splice(0,0,wH());for(f=w9(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}f=XP2({})}else f=B71(t),t=Hw1(s),v=h71(f=f instanceof t),f=YM2(s),t=p31(_),u=IW(f-=t);else 
if(M<76386){for(z.for_in_xh_cbb_list=p,f=e[i++],r=kf(_),g=[],t=0;t<f;t++)g.splice(0,0,ds1(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=HK(s),t=iu2([])}else f=ep2(_),t=YG(w),f=QB(u=iG2(f=t<f)),t=e[i++],f?w=10:i+=t;else M<76639?M<76554?M<76509?(_=Qe2(f),t=RC(void 0),_=Hc1(f),t=Qz(void 0)):(f=_E(z),t=tR(f),_=ai1(f=f<=t),f=pU(u),t=ML(v),d=hI(f|=t)):M<76555?(f=Is2(r),t=yC(s),_=gj2(f^=t),_=dX2(r),s=Qg2(!_)):(f=bD2(r),t=t11(s),_=Lf(f^=t),f=fy1(r),t=BH2(s),null!=(r=nw1(_)).variablePool?n(r,t,f):r[t]=f):M<76699?M<76640?(f=j61(r),r=e[i++],--f[constantPool[r]],f=JJ2(t),t=No(r),r=H2(f&=t)):(t=Nt([]),f=NH2(u),t=Jd(f),t=nT2(f>>>=t)):(f=OP(r),t=rl(s),_=bc2(f>>=t),f=dP(r),t=J1(s),_=jj2(f>>=t));else if(M<77151)M<77007?M<76963?M<76746?(f=LK(t),t=c22(f),s=sR2(f=t==f),f=lw1(r),t=QB2(s),_=a8(f^=t)):(f=NP(r),t=d02(s),_=hh1(f^=t),f=VC2(r),t=NT(f),f=mT(f=f<t)):M<76964?(f=O$(u),t=M41(f),t=cO(f>>>=t)):(f=P82(t),t=Ry2(f),s=xb2(f=t==f),f=n8({})):M<77117?M<77008?(f=c52(t),t=pH2(f),r=nj2(f=t===f)):(f=sL(t),t=yv1(f),f=Nf2(f=t-f),f=Sa2(_),t=Tf1(u),v=ZP2(f*=t)):(f=zo1(_),t=eE(u),f=LO2(f=t!==f),_=M91(t),t=V62(-_));else if(M<77397)if(M<77361)M<77152?(f=NX2(),t=Js1(),Ax2(f=t!=f),f=y3(t),t=UK(f),r=no2(f/=t)):(f=t42(r),t=nd(s),f=iN2(_=dS2(f%=t)),t=px2(u),f=N_(f=t!==f));else if(M<77362)f=W7(_),t=e[i++],f?(i+=t,v=gR2(f)):w=10,t=_l2(h);else{f=Ot(t),t=br1(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=tY2(f),f=T72(r),t=_t2(s),_=Cu(f%=t)}else if(M<77425)if(M<77398){f=W(t),t=zT(f),f=t==f,s=t91(f);{for(f in p=[],t=xv(t))p.push(f);z.for_in_xh_cbb_list=p}}else _=Kx2(f),t=Hx(void 0),f=Gw2(z),t=mP(f),_=vS2(f=f<=t);else f=QS(_),t=uo1(u),f=Uv1(f=t!==f),f=$r(r),t=L02(s),_=IM2(f^=t);else 
if(M<79180)if(M<78671)M<78302?M<77946?M<77765?f=M<77520?(_=Tv(r),s=m21(!_),TU2({})):(f=A51({}),f=Hl1(r),(t=u21(f)).push(f),CI(t)):M<77766?(f=e[i++],i+=f,f=dT2(t),t=u1(f),f=HN2(f=t-f)):(f=Mr(u),t=as(v),d=ur(f|=t),f=Rv1(t),t=V02(f),r=Fm(f=t===f)):M<78039?M<77947?(f=wC2(r),t=ni(s),_=SK2(f>>=t),f=G72(),t=h$(),i2(f=t!=f)):(f=vW2(_),t=e[i++],f?(i+=t,v=fO2(f)):w=10,f=sW2(s),t=bn1(_),u=Cr2(f-=t)):M<78040?(_=YU(r),s=$a(!_),_=jt1(f),t=k11(void 0)):(f=ne1(t),t=D82(r),r=Rd1(f&=t),f=Ur(t),t=MD2(s),v=q_(f=f instanceof t)):M<78323?M<78309?M<78303?(f=VF2(r),t=Jw2(s),_=Kb(f^=t),f=$a2(t),t=Ra1(f),f=Z01(f+=t)):(f=TJ2(s),t=aN(_),u=c2(f-=t),f=e52(t),t=gP(f),r=St2(f/=t)):M<78310?(f=Ct(r),t=qP(s),f=M72(_=ot1(f>>=t)),t=Rf1(r),null!=(r=Y(f)).variablePool?n(r,t,f):r[t]=f):(f=ky2(t),t=mU2(f),s=d2(f=t==f),_=WF2(r),f=WO2(typeof _)):M<78621?M<78324?(f=kg(s),t=Re(_),u=fw1(f-=t),f=M32(t),t=Af(f),r=BN2(f/=t)):(f=QF(r),t=Ty1(s),_=VJ2(f>>=t),f=vo(t),r=e[i++],f?i+=r:w=9):(f=fJ2(_),t=YO(w),u=x2(f=t<f),f=WE2(r),t=PT2(s),_=x5(f>>=t));else if(M<78803)M<78762?M<78731?M<78672?(f=jx(t),t=q71(r),r=dA(f&=t),f=l6(),t=rK2(),MP(f=t!=f)):(f=jA2(u),t=A32(f),t=pl(f>>>=t),f=LY2(u),t=$X2(f),t=p81(f>>>=t)):M<78732?(f=t_(),t=Hf1(),r$2(f=t!=f),f=Fb(t),t=aj(r),r=G12(f&=t)):(f=K5(u),t=k$(f),t=rb(f>>>=t),f=fo1({})):M<78800?M<78763?(f=OW(_),t=xZ2(w),u=Iz2(f=t<f),f=e[i++],t=oU(constantPool[f])):(f=e[i++],t=v4(f),f=$f1(t)):(f=js(_),t=dJ2(w),u=Kk(f=t<f),f=kQ(_),t=e[i++],f?(i+=t,v=Vy2(f)):w=10);else if(M<79038)if(M<78977)if(M<78804)_=o92(f),t=bi2(void 0),f=e[i++],t=ID2(r),r=z91(f),t[constantPool[f]]=r;else for(_=kS2(r),s=oJ(!_),d=c.CFf,f=0;f<d;f++){let c=v=fd(f);l[c]=function(){var b=new 
cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else M<78978?(f=ov1(r),t=zm(t),f=Lh(f<<=t),f=HH2(r),t=yt1(r),t=Y22(f=t<f)):(t=S11(h),f=Hv(t),t=q21(f),f=pj(f+=t));else M<79128?M<79039?(f=ys2(z),t=mi2(f),_=tL(f=f<=t),f=e[i++],i+=f):(f=XX(u),t=YY2(f),f=sd(f=(t=P1(f=Dd2(t=NG(f>>>=t))))-f)):(f=e[i++],t=ic2(f));else if(M<79914)if(M<79617)if(M<79350)if(M<79248){if(M<79181)return f=oo2(z),t=Ap2(f),void(_=J11(f=f<=t));f=wf(t),t=_B2(f),r=ls1(f=t===f),f=oH2(t),t=tu2(f),f=Xw2(f+=t)}else M<79249?(f=CG(r),t=t62(s),null!=(r=vP1(_)).variablePool?n(r,t,f):r[t]=f,f=xo2(_),t=h11(u),v=QU(f*=t)):(f=e[i++],t=KR2(r),r=xd(f),t[constantPool[f]]=r,f=T91(r),t=Pv1(f),f=EL(f=f<t));else M<79474?t=M<79351?(f=tw2(r),t=QT2(f),f=_x(f=f<t),Sk2([])):(f=hG(r),t=A8(r),t=$w2(f=t<f),f=mD2(u),t=Ho1(f),lo(f>>>=t)):(f=pn1(_),t=GC(w),u=mf(f=t<f),f=vq(_),t=Hz2(u),f=uy1(f=t!==f));else if(M<79868)if(M<79722)if(M<79618)f=Is(u),t=vs2(f),t=rN2(f>>>=t),f=Jt2(r),t=f61(s),_=Qv(f>>=t);else{{for(f in p=[],t=nn1(t))p.push(f);z.for_in_xh_cbb_list=p}t=C0([])}else M<79723?(f=Ki(t),t=TE(f),f=lP(f=t-f),f=TO2(r),t=db1(r),t=HS2(f=t<f)):(f=za2(t),r=e[i++],f?i+=r:w=9,f=F2(s),t=$I(_),u=SM2(f-=t));else M<79898?M<79869?(f=dX(t),t=AG2(f),r=Ah(f=t===f),f=sM2(t),t=dR(f),r=tS(f/=t)):(f=Uc2(r),t=pZ(s),_=ym2(f^=t),f=Eo2(s),t=GJ(_),u=U11(f-=t)):(f=eN2(u),t=Q52(f),t=Pr1(f>>>=t),f=HM2(s),t=hq2(f),t=kt2(f=t<=f));else 
if(M<80343)if(M<80129)if(M<79994)M<79915?(f=e[i++],t=rw1(f),f=Pq(r),t=Td(s),_=ce1(f^=t)):(f=hi1(r),t=m41(s),_=Ne(f>>=t),f=I$(t),t=De(f),r=zm2(f/=t));else if(M<79995)f=Z1(r),t=zl1(s),_=ra(f%=t),f=Wa2(),t=IF(),at(f=t!=f);else{for(f=c.CFf,t=0;t<f;t++)w=uD(t),z[w]=o[t];f=dv1(_),t=Qy2(u),v=Q21(f*=t)}else M<80250?M<80130?(f=u32(_),t=kl1(u),f=ad(f=t!==f),f=vs(l=z)):(f=eK(u),t=LX(v),d=Ms2(f|=t),f=vn1(u),t=e[i++],f?w=10:i+=t):(f=Ex2(t),t=V0(f),r=Fj(f=t===f),f=Ak2(_),t=e[i++],f?(i+=t,v=XL2(f)):w=10);else if(M<80712)if(M<80570)if(M<80344)f=v12(t),t=KD2(f),r=bD(f/=t),f=jF(w),r=e[i++],f[constantPool[r]]+=1;else{for(d=c.CFf,f=0;f<d;f++){let c=v=pA2(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=yD(r),t=yX(s),_=oN(f^=t)}else M<80571?(f=Pj(_),t=B61(u),f=X71(f=t!==f),f=oM(r),(t=li(f)).push(f),f=Vy1(t)):(f=EM(_),t=fi1(r),null!=(r=IF2(f)).variablePool?n(r,t,f):r[t]=f,f=BG2(r),t=YS(s),_=JB2(f>>=t));else if(M<80727)if(M<80713)f=i52(u),t=OJ2(f),t=rV(f>>>=t),f=gW(r),t=LL(s),_=$o2(f%=t);else{for(f in f=g4(r),t=Zc2(r),t=hE2(f=t<f),p=[],t=WB2(t))p.push(f);z.for_in_xh_cbb_list=p}else{t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return 
u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=xD(t),t=y81(r),r=DA2(f&=t)}else if(M<83858)if(M<82078)if(M<81348)if(M<80983)if(M<80895)M<80862?M<80797?(f=e[i++],t=xV(constantPool[f]),f=Tu2(r),t=_W2(r),t=Uh(f=t<f)):(f=ip2(_),t=tk2(r),null!=(r=C_1(f)).variablePool?n(r,t,f):r[t]=f,f=Xu2(_),t=Q3(u),f=rH2(f=t!==f)):M<80863?(f=_w2(s),t=jX2(f),t=ZK(f=t<=f),f=yl2(_),t=Bd(u),f=YJ(f=t!==f)):(f=sG(_),t=bl2(u),v=bu2(f*=t),f=zt(u),t=tX(f),t=_i2(f>>>=t));else if(M<80958)if(M<80896)throw f=Bf1(t),t=wK2(r),r=JB(f&=t),f=Ph2(t);else f=pf(t),t=As(r),r=K$(f&=t),f=sd2(t),t=cw1(f),r=d_(f=t===f);else M<80959?(f=bP2(),t=RP2(),p_2(f=t!=f),f=eY2(r),t=e[i++],f?w=10:(i+=t,s=pO(f))):(f=d_2(t),t=Ej2(f),s=V52(f=t==f),f=e12(t),t=Bb1(f),s=Sq2(f=t==f));else if(M<81324)if(M<81055)if(M<80984){f=Rs1(r),t=M52(s),_=_n(f>>=t),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else f=Wu2(r),t=r12(s),null!=(r=QL2(_)).variablePool?n(r,t,f):r[t]=f;else if(M<81056){{for(z.for_in_xh_cbb_list=p,f=e[i++],r=Bg2(_),g=[],t=0;t<f;t++)g.splice(0,0,Fi2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=SD(s)}f=IO(_),t=Vf1(u),f*=t,v=O31(f)}else f=Wd2(r),r=e[i++],--f[constantPool[r]],f=sg2(r),t=Dl2(r),t=sH2(f=t<f);else M<81345?f=(M<81325?(f=wu(t),t=ud(f),f=kY(f+=t),f=fl(r),t=Fs2(t),f<<=t,RI2):(f=rk(r),t=Qf1(t),f<<=t,bA2))(f):(f=Ik(t),t=GJ2(r),r=Zj2(f&=t));else 
if(M<81630)M<81501?M<81472?M<81349?(f=ld2(s),t=LC2(_),f=s72(u=Zv(f-=t)),t=TP2(f),t=zA(f>>>=t)):(f=eY({}),f=QJ2(_),t=F82(u),v=ya(f*=t)):M<81473?(f=gl1(t),f=J42(r),t=DI(t),f=TR2(f<<=t)):(f=PJ(t),t=JO(f),s=A(f=t==f),f=ay(w),r=e[i++],f[constantPool[r]]+=1):M<81616?f=(M<81502?(function(){for(f=c.CFf,t=0;t<f;t++)w=WW2(t),z[w]=o[t]}(),f=Vg2(t),t=un2(f),f=t-f,N32):(t=yt2(h),f=dq(r),t=BU2(f),f=f<t,wy))(f):(f=m82(r),t=ct1(s),_=O42(f>>=t),f=Wb(t),t=lJ(f),r=nc1(f=t===f));else if(M<81799)if(M<81668)if(M<81631){f=di1(t),t=Lg(f),f=t===f,r=ZB2(f);{for(f in p=[],t=ps2(t))p.push(f);z.for_in_xh_cbb_list=p}}else f=CT(t),t=BI2(f),r=Vu(f=t===f),f=Dv1(u),t=q11(f),t=Oy(f>>>=t);else f=M<81669?(f=vs1(u),t=Ol1(f),h41(t=tv(f>>>=t))):(f=va1(t),t=K02(r),r=Bd1(f&=t),f=hT(_),t=mb2(u),PH2(f=t!==f));else M<82077?M<81800?(f=qM(t),t=Ac(r),r=uS(f&=t),f=e[i++],t=GM2(f)):(f=$b2(r),t=T5(r),t=zM2(f=t<f),f=br(z),t=JQ2(f),_=BN(f=f<=t)):(f=dn2(t),t=Hl2(r),null!=(r=pn(s)).variablePool?n(r,t,f):r[t]=f,f=ay1(r),f=Nl(r),t=rs2(r),t=ng(f=t<f));else if(M<83013)if(M<82406)if(M<82315)if(M<82265)u=(M<82079?(_=xi2(r),f=qd1(~_),f=Sv1(_),t=fT2(w),f=t<f,sq2):(f=Vp2(_),t=ZX(u),v=jq(f*=t),f=ys(s),t=C82(_),f-=t,Dn2))(f);else{if(!(M<82266))return f=mI(r),t=LZ(r),void(t=y62(f=t<f));f=Cq({}),f=N71(s),t=vP(f),t=xe2(f=t<=f)}else if(M<82402)if(M<82316)f=Y51(_),t=X_(u),v=Ms1(f*=t),f=so(_),t=wG2(r),null!=(r=Ch1(f)).variablePool?n(r,t,f):r[t]=f;else{{f=IJ(t),t=PM2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=jI2(f)}_=wB2(f),t=$c2(void 0)}else f=IB(t),t=G62(s),v=TT(f=f instanceof t),f=$l(u),t=ha2(v),d=it2(f|=t);else if(M<82521)M<82485?M<82407?(f=y$(w),r=e[i++],f[constantPool[r]]+=1):(f=Ku2(s),t=HP1(_),u=$5(f-=t),f=Fu(s),t=Ue1(_),u=Bv(f-=t)):M<82486?(f=lu2(u),t=_Y2(v),d=lM(f|=t),t=XZ2([])):(f=XB2(u),t=e[i++],f?w=10:i+=t,f=Oj(_),t=Qw2(u),v=di2(f*=t));else if(M<82913)if(M<82522)_=Hr2(r),f=Vi2(typeof _),f=Xi2(t);else{for(z.for_in_xh_cbb_list=p,f=e[i++],r=D7(_),g=[],t=0;t<f;t++)g.splice(0,0,n42(u));offnew=1,s=r==RegExp?new 
RegExp(g[0],g[1]):new r(...g),offnew=0,f=vp(s),t=Bt(h)}else f=rn1(t),t=$Y(r),r=pe2(f&=t),f=Oj2(t),t=E41(f),s=qL2(f=t==f);else if(M<83340)if(M<83270)if(M<83031)M<83014?(t=b01(h),f=Gc1(r),t=q0(s),_=Uj(f%=t)):(f=DT(z),t=Nx(f),_=c$(f=f<=t),f=hD2(w),r=e[i++],f[constantPool[r]]+=1);else if(M<83032){{for(f in p=[],t=_T(t))p.push(f);z.for_in_xh_cbb_list=p}f=Ht(r),t=EE(s),f^=t,_=vD2(f)}else f=Yz2(z),t=Rf(f),f=Ub2(_=jN(f=f<=t)),t=Tw1(u),v=rt(f*=t);else M<83307?M<83271?(f=sE2(t),t=Np(f),f=S21(f+=t),f=w81(r),t=qa1(s),_=k61(f>>=t)):(f=YD(r),t=hp2(t),f=nD(f<<=t),f=JR2(r),t=Wl(r),t=IB2(f=t<f)):(y=zL(f),null!=(P=yq2(t)).variablePool?n(P,y,v):P[y]=f);else if(M<83813)if(M<83614)M<83341?(_=hh2(r),f=iB2(typeof _),t=GE(h)):(f=mQ2(r),t=gI2(r),t=bo(f=TN2(t=j71(f=t<f))),r=C02(f/=t));else if(M<83615)f=W$(u),t=$P1(f),t=bK2(f>>>=t),f=F52(r),t=ID(r),t=i4(f=t<f);else{f=Wr2(t),t=UQ(f),f=t-f,f=et1(f);throw f=$U(t)}else M<83848?M<83814?(f=xC2(t),t=fn1(f),r=f0(f=t===f),f=CX2(t),t=x_2(f),f=A12(f+=t)):(f=Ft({}),f=GZ2(r),t=GA(r),t=Dh(f=t<f)):(f=Yl(r),(t=na1(f)).push(f),f=bJ2(t),f=a41(_),t=xj(u),v=AE2(f*=t));else if(M<85353)if(M<84670)if(M<84210)if(M<83965)if(M<83916)if(M<83859)f=Dr1(t),t=$(s),v=ph1(f=f instanceof t),t=UV2(h);else for(f=Yp2({}),d=c.CFf,f=0;f<d;f++){let c=v=P8(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else M<83917?(f=ra1({}),f=IZ(r),t=cj(s),_=M_2(f>>=t)):(_=t2(f),t=sr1(void 0),f=kf1(t),t=AE(f),f=iw(f=t-f));else 
M<84028?M<83966?(f=pc2(r),t=IS(f),r=delete t[f],f=XE2(r),f=_o2(t)):(f=h$2(z),t=z_(f),_=cA(f=f<=t),_=n61(r),s=Jf2(!_)):M<84029?(f=N4(t),t=zb1(f),f=Eu1(f=t-f),f=v_2(w),r=e[i++],f[constantPool[r]]+=1):(f=Sh2(r),(t=sy(f)).push(f),f=sK(t),_=hK(f),t=ZU2(void 0));else if(M<84573)if(M<84302)t=(M<84211?(f=cb(z),t=uD2(f),_=Bo1(f=f<=t),f=yd1(s),t=NF2(f),f=t<=f,dN):(f=V82(r),(t=Rh1(f)).push(f),f=tl1(t),f=cE(s),t=pf1(f),f=t<=f,Nf1))(f);else if(M<84303)f=fo(_),t=Zd2(u),v=Kn2(f*=t),t=o01(h);else{for(f=c.CFf,t=0;t<f;t++)w=tZ(t),z[w]=o[t];f=$9(r),t=nG(s),f%=t,_=SN2(f)}else M<84614?M<84574?(f=Rt1(s),t=j82(_),u=HX2(f-=t),f=LF(w),r=e[i++],f[constantPool[r]]+=1):(f=Or1(t),f=$u2(_),t=r92(u),f=vu(f=t!==f)):(f=z92(t),t=Rv2(f),s=Xf2(f=t==f),y=hv(f),null!=(P=gU(t)).variablePool?n(P,y,v):P[y]=f);else M<85085?M<84784?M<84758?M<84671?(f=Uk(l=z),f=Yv1(r),t=Fd(t),f=N2(f<<=t)):(f=rf(s),t=U8(f),t=n02(f=t<=f),f=HA(r),t=Em(s),_=bg2(f>>=t)):M<84759?(f=Uu(r),t=cf1(s),null!=(r=D2(_)).variablePool?n(r,t,f):r[t]=f,f=DX2(r),t=PI(f),f=MB(f=f<t)):(f=sP(z),t=sD2(f),_=Lu2(f=f<=t),f=cw2(r),t=Sb(s),_=fp(f>>=t)):M<85016?M<84785?(f=y91(t),t=Ma1(f),f=Y42(f+=t),f=j32(_),t=Sb2(w),u=MT(f=t<f)):(f=Pf2(t),t=hN2(f),s=Mp2(f=t==f),f=dD(_),t=ib1(u),v=_r1(f*=t)):(f=Aw2(u),t=e[i++],f?w=10:i+=t,f=pP(r),t=RJ2(t),f=Wr1(f<<=t)):M<85230?M<85171?M<85086?(f=mh(u),t=e[i++],f?w=10:i+=t,f=PC(z),t=en2(f),_=SW(f=f<=t)):(t=d61(h),f=Zn(r),t=eQ2(r),t=lP1(f=t<f)):M<85172?(f=x31(r),t=ga2(r),f=iT(t=mk2(f=t<f)),t=vj(r),null!=(r=Lf1(s)).variablePool?n(r,t,f):r[t]=f,f=nB2(r)):(f=Ll(r),t=Xh2(f),f=Vr(f=f<t),f=CK(t),t=CR2(r),r=Lp2(f&=t)):M<85301?M<85231?(f=Gf1(r),t=Cz2(s),_=wQ(f^=t),f=AY2(t),t=f2(r),r=sl2(f&=t)):(f=JG2(t),t=Iy2(r),null!=(r=xp(s)).variablePool?n(r,t,f):r[t]=f,f=G_(r),f=JT2(t),t=n1(f),s=uU(f=t==f)):(f=_f1(t),t=oF2(f),f=Gg(f+=t),f=E4(r),t=y11(t),f=uQ(f<<=t));else 
if(M<86117)if(M<85756)M<85549?M<85445?M<85354?(f=o0(t),t=zz2(f),r=Or2(f/=t),f=oc1(),t=LI2(),Ze1(f=t!=f)):f=T21(l=z):M<85446?(f=k52(_),t=pm2(r),null!=(r=et2(f)).variablePool?n(r,t,f):r[t]=f,f=TA(t),t=J82(f),r=qm2(f=t===f)):(_=ow(r),f=Pe2(~_),f=$32(),t=jy(),aB(f=t!=f)):M<85728?M<85550?(_=C(t),t=zj2(-_),f=e82(),t=Yr1(),Uf2(f=t!=f)):(f=EM2(_),t=e[i++],f?(i+=t,v=Yh1(f)):w=10,f=Ld(r),t=GG(r),t=Bn2(f=t<f)):(f=S$(s),t=JF(f),t=Qc(f=t<=f),f=B3(l=z));else if(M<85924)if(M<85876)M<85757?(f=b72(_),t=Th2(r),null!=(r=zu2(f)).variablePool?n(r,t,f):r[t]=f,f=ut(u),t=vX2(v),d=HJ2(f|=t)):(f=Qa(t),t=yw(f),f=zz(s=EL2(f=t==f)),t=Xs(f),t=l61(f=t<=f));else{if(!(M<85877))throw f=OZ2(t);f=W42(_),t=jv(u),v=A61(f*=t),f=Vx(t),t=Dk(r),r=T31(f&=t)}else M<86031?M<85925?(_=AT2(f),t=F0(void 0),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=LF2(f)):(f=QH(_),t=e[i++],f?(i+=t,v=Kq2(f)):w=10,f=eJ2(r),t=No1(s),_=_r(f^=t)):(f=m5(t),f=yi2(t),t=NE2(r),r=Zv2(f&=t));else if(M<86572)M<86389?M<86337?M<86118?(f=Ov2(z),t=$i(f),_=YC2(f=f<=t)):(_=ir2(r),s=fa2(!_),f=pJ(t),t=j2(f),s=uO2(f=t==f)):M<86338?(y=wV(f),null!=(P=RM2(t)).variablePool?n(P,y,v):P[y]=f,f=Qi(s),t=hN(f),t=H3(f=t<=f)):(f=EO(z),t=MM(f),_=Na2(f=f<=t),f=zF(t),t=Mm2(f),f=Z02(f+=t)):M<86529?M<86390?(f=Xj(),t=H41(),IA(f=t!=f),f=ot2(s),t=_M(_),u=Ip2(f-=t)):(t=dP1([]),f=Hj(r),t=UP(f),f=mC2(f=f<t)):(f=xH(s),t=Kn1(f),t=Gs1(f=t<=f),f=F6(r),t=rC(f),f=wE(f=f<t));else if(M<86843)if(M<86714)if(M<86573){f=l81(t),t=bc(f),f/=t,r=M62(f);for(f=c.CFf,t=0;t<f;t++)w=ZS2(t),z[w]=o[t]}else f=hB2(s),t=Zs1(_),u=wi2(f-=t),f=e[i++],i+=f;else f=M<86715?(f=e[i++],t=ez2(f),aV2({})):(f=Sf(r),t=xf(f),f=ho(f=f<t),f=C_(r),t=i71(t),KL2(f<<=t));else M<86907?M<86844?(_=Ai(f),t=BB2(void 0),_=Pr(r),f=p1(~_)):(f=Su2(),t=Wy(),qa2(f=t!=f),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=Cl(f)):(f=A72(t),t=sr(f),r=Q61(f=t===f),_=Qy(f),t=FQ(void 0));else 
if(M<93691)if(M<90324)if(M<88698)if(M<87737)if(M<87252)if(M<87033)if(M<87017)M<86950?(f=aQ2(r),t=VA2(t),f=YW(f<<=t),f=EO2(r),r=e[i++],--f[constantPool[r]]):(f=XJ2(t),t=rk2(f),f=XV2(f=t-f),f=B42({}));else if(M<87018){for(d=c.CFf,f=0;f<d;f++){let c=v=U91(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=yU(_),t=yL(u),v=JE(f*=t)}else{f=LP(t),t=fd2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=zW2(f),_=_v(r),s=Iu2(!_)}else M<87090?M<87034?(f=oh(t),t=Zw(r),r=Uv(f&=t),f=e[i++],t=SS(f)):(f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=nr(f),f=P0(z),t=Wg(f),_=ko2(f=f<=t)):M<87091?(t=_11([]),_=Ew(t),t=Y$(-_)):(f=r9({}),f=ao(r),r=e[i++],--f[constantPool[r]]);else if(M<87519)if(M<87455)if(M<87253){{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,rR()):s.splice(0,0,m12());for(f=i8(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}f=TY(r),t=Ut1(s),f>>=t,_=s81(f)}else f=PU2(_),t=hy2(r),null!=(r=Hk2(f)).variablePool?n(r,t,f):r[t]=f,f=kM(r),t=w01(t),f=bX(f<<=t);else M<87456?(f=Fe2(r),t=ya1(s),_=Cv(f>>=t),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=Kk2(f)):(f=q92(r),t=$_(r),t=Bq(f=t<f),f=q32(s),t=NY(f),t=I22(f=t<=f));else f=M<87713?M<87520?FV2(~(_=gS2(r))):(f=qQ2(r),t=dD2(t),f=s11(f<<=t),f=zN(t),t=RT2(f),AF(f+=t)):(f=YB2(r),t=Uw(r),t=lT2(f=t<f),f=aX2(r),t=ba(f),Mm(r=delete t[f]));else 
M<88197?M<88054?M<87847?M<87738?(f=gV(z),t=PP2(f),_=YB(f=f<=t),f=CF(t),t=jQ2(r),r=p21(f&=t)):(f=Bf2(r),t=se1(s),_=ur1(f%=t),f=XB({})):M<87848?(f=Yy1(t),t=zh1(f),f=Ik2(f=t-f),f=aL(t),t=dB2(r),null!=(r=Zt2(s)).variablePool?n(r,t,f):r[t]=f,f=fk2(r)):(f=JI(t),t=VH(f),f=hq(s=dA2(f=t==f)),t=Aw(_),u=Bw(f-=t)):M<88194?M<88055?(f=WX(u),t=aG(v),d=b$(f|=t),f=BM(r),t=ZM2(r),t=vx2(f=t<f)):(f=k92(t),t=U32(f),f=PR(f+=t)):(f=m31(_),t=yW2(u),f=BC2(f=t!==f),f=iv2(t),t=jS(r),r=ze1(f&=t)):M<88345?M<88208?M<88198?(f=tk(t),t=oX2(f),f=Oz(f=t-f),f=e[i++],t=e[i++],f=new RegExp(constantPool[f],constantPool[t]),s=vW(f)):(f=F62(l=z),f=Cs2(t),t=No2(r),r=YL(f&=t)):M<88209?(_=P_(r),f=ui2(~_),f=Ug2(_),t=pc(u),f=FL(f=t!==f)):(f=SK(r),t=fG2(t),f=x0(f<<=t),t=_$2([])):M<88454?M<88346?(f=Cq2(t),r=e[i++],f?i+=r:w=9,f=sl(r),t=lv(f),f=On(f=f<t)):(f=bP1(z),t=gQ(f),_=GD(f=f<=t),f=jK(s),t=yv2(f),t=y02(f=t<=f)):(f=G52(u),t=e[i++],f?w=10:i+=t);else if(M<89401)if(M<89146)if(M<88999)M<88745?M<88699?(f=hJ({}),f=z61(t)):(y=$$2(f),null!=(P=T22(t)).variablePool?n(P,y,v):P[y]=f,f=dI2(_),t=lt(w),u=cC(f=t<f)):M<88746?(t=sQ2(h),f=e[i++],t=KW2(f)):(f=Qb2(_),t=wg(r),null!=(r=rR2(f)).variablePool?n(r,t,f):r[t]=f,f=Jm(r),t=Qo2(s),_=Do1(f>>=t));else if(M<89106){if(M<89e3)return void(f=Pp({}));for(f in f=$u(_),t=qK2(w),u=an(f=t<f),p=[],t=Cy2(t))p.push(f);z.for_in_xh_cbb_list=p}else _=R51(r),s=sj2(!_),f=e[i++],t=WD2(r),r=XF(f),t[constantPool[f]]=r;else if(M<89266)if(M<89185)M<89147?(f=qE(r),t=_Z2(r),t=CA2(f=t<f),f=l3(r),t=Qj(s),_=EY2(f>>=t)):(f=o41(t),t=ga1(f),s=Af2(f=t==f),_=MY2(r),f=gy1(~_));else if(M<89186){f=ea1(r),t=v1(t),f<<=t,f=_12(f);for(f=c.CFf,t=0;t<f;t++)w=Ab1(t),z[w]=o[t]}else f=qF2(r),(t=Lu1(f)).push(f),f=wM(t),f=eW(t),t=Ef(f),r=qS(f=t===f);else 
if(M<89391)M<89267?(y=Ie2(f),null!=(P=tT2(t)).variablePool?n(P,y,v):P[y]=f,f=a_1(s),t=zX2(_),u=u42(f-=t)):(y=f62(f),null!=(P=hP2(t)).variablePool?n(P,y,v):P[y]=f,f=G81(r),t=gK2(t),f=uc2(f<<=t));else{for(f=e[i++],r=P52(_),g=[],t=0;t<f;t++)g.splice(0,0,fL2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=L82(s)}else M<89873?M<89595?M<89484?M<89402?(f=ZW({}),f=k9(z),t=EC2(f),_=qn(f=f<=t)):(f=yB(u),t=t$2(v),d=ks1(f|=t),f=j91(t),t=Qu2(f),f=A_(f=t-f)):M<89485?(f=BW(r),t=e[i++],f?w=10:(i+=t,s=XW(f)),f=RX2(t),t=wr(f),r=_41(f/=t)):(f=e[i++],t=H21(r),r=sy2(f),t[constantPool[f]]=r,f=p$(r),t=Eu(s),_=CH2(f^=t)):M<89858?M<89596?(f=L4(t),t=i22(f),f=B91(f+=t),f=mu(t),t=v3(f),s=AK2(f=t==f)):(f=RM(r),t=fX(f),f=ZN(f=f in t),f=Zi1(u),t=sA2(v),d=q91(f|=t)):(f=Jq(r),t=pO2(s),f=CN(_=sn(f>>=t)),t=Di1(u),v=CV2(f*=t)):M<90206?M<90096?M<89874?(f=ii({}),f=NK(t),t=G21(r),r=HQ2(f&=t)):(f=Oa1(u),t=pk(v),d=hz2(f|=t),f=Pk2(r),t=wR2(f),r=delete t[f],f=s01(r)):M<90097?(f=Kv2(r),t=PF(r),t=b21(f=t<f),f=e[i++],t=OR2(f)):(_=L81(f),t=fZ(void 0),f=eX2({})):M<90259?M<90207?(f=bd2(r),t=U(s),_=OQ2(f%=t),f=NO(),t=tI(),tb1(f=t!=f)):(f=CR(r),t=ou1(s),_=Ed1(f^=t),f=Yp(r),t=iu(r),t=J51(f=t<f)):(f=B02(r),t=Ja2(f),f=Z22(f=f in t),_=GR(r),s=s2(!_));else if(M<91628)if(M<91020)if(M<90859)M<90563?M<90464?M<90325?(f=Na1(t),t=f52(f),s=$0(f=t==f),_=ze(t),t=xx(-_)):(f=uG2(z),t=d42(f),_=iF2(f=f<=t),_=JA2(r),f=Fo1(typeof 
_)):M<90465?(f=Te2({}),f=zq(t),t=NJ(f),r=rx(f/=t)):(t=wd([]),_=DC(r),s=Ug(!_)):M<90610?M<90564?(f=Po1(u),t=s91(f),f=W72(f=(t=a4(f=Aa2(t=IP2(f>>>=t))))-f)):(f=RL2(s),t=Tp(f),t=tD(f=t<=f),_=wO(r),s=P11(!_)):t=(M<90611?(function(){for(f=e[i++],r=L9(_),g=[],t=0;t<f;t++)g.splice(0,0,hf(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=mA2(s)}(),f=XY2(u),t=xm(f),f>>>=t,Ai2):(f=yG(u),t=Z_2(v),d=Xh1(f|=t),f=DO2(u),t=I72(f),f>>>=t,d91))(f);else if(M<90918)if(M<90908){if(!(M<90860))throw f=LT(t);f=ye(r),t=Eb(f),f=Tc(f=f<t),f=e[i++],t=an1(f)}else if(M<90909)f=uZ(_),t=ON2(u),f=z51(f=t!==f),f=Et(w),r=e[i++],f[constantPool[r]]+=1;else{for(z.for_in_xh_cbb_list=p,f=e[i++],r=rf1(_),g=[],t=0;t<f;t++)g.splice(0,0,Kg(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=uc1(s),f=Kc2(r),t=kd1(f),f=Gu(f=f<t)}else M<90938?M<90919?(f=Do(_),t=fH(u),v=T4(f*=t),f=pd1(u),t=oh1(v),d=mw1(f|=t)):(f=sC2(r),t=G02(f),f=MY(f=f in t),_=wU2(r),s=Jl(!_)):(f=Qm2(_),t=Z41(w),u=ZE(f=t<f),_=Xo2(f),t=ku1(void 0));else if(M<91410)if(M<91103)if(M<91039)if(M<91021)_=kV2(r),f=$H2(~_),f=GM(t),t=R(f),s=RX(f=t==f);else{t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}f=Xt2(r),t=Xh(s),_=TX2(f>>=t)}else 
if(M<91040)f=GT(r),t=jA(s),_=ft1(f%=t),f=uw1(s),t=D51(f),t=__(f=t<=f);else{f=f9(u),t=Hi(f),f>>>=t,t=zC2(f);{for(f=e[i++],r=zZ(_),g=[],t=0;t<f;t++)g.splice(0,0,e6(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=dK(s)}}else M<91328?M<91104?(f=OG(t),t=oj(f),s=tU2(f=t==f),_=l32(r),s=fD(!_)):(f=t41(_),t=os2(u),v=IX(f*=t)):(f=lq2(t),t=g11(f),s=ZR2(f=t==f),f=Gj2(r),t=fr2(s),_=Bj(f%=t));else if(M<91542)if(M<91427)M<91411?(t=xJ2(h),f=Zl(u),t=Iw1(v),d=yZ(f|=t)):(f=Za(_),t=e[i++],f?(i+=t,v=Ob2(f)):w=10,f=el1(t),t=O3(f),f=Rk2(f+=t));else if(M<91428)f=i3(),t=h6(),L8(f=t!=f),f=Ed2(_),t=_P(w),u=yM(f=t<f);else{for(f=e[i++],r=FW(_),g=[],t=0;t<f;t++)g.splice(0,0,ns1(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=N_2(s),f=Mh(r),t=e$(f),f=C21(f=f<t)}else M<91582?M<91543?(f=wA(t),t=ZO2(f),f=cH(f+=t),f=Ja(u),t=Bm(f),t=cc(f>>>=t)):(f=Gt2(r),t=O_1(s),_=bm2(f^=t),y=BY(f),null!=(P=FX(t)).variablePool?n(P,y,v):P[y]=f):(_=Ky1(r),s=WW(!_),f=jo(_),t=oY(u),v=Hc2(f*=t));else if(M<92588)if(M<92096)M<91934?M<91802?M<91629?(f=Pk(u),t=bv2(v),d=br2(f|=t),_=pw1(r),f=pR(typeof _)):(_=Ts2(t),t=Tq2(-_)):M<91803?(f=fA(u),t=gz(f),t=kP1(f>>>=t),f=m01(r),t=cQ2(s),_=Gx(f%=t)):(f=ej2(r),t=Jt(t),f=TL(f<<=t),f=$R(t),t=Js(r),r=$d2(f&=t)):M<92021?M<91935?(f=aE(_),t=yW(w),u=up2(f=t<f),f=l2(s),t=eh(_),u=FP(f-=t)):(f=kZ2(t),t=lV(f),r=Tg(f/=t)):(_=uJ(r),s=Pd2(!_),f=Pn1(u),t=EF(v),d=KU2(f|=t));else if(M<92270)if(M<92215)if(M<92097){_=k01(r),s=K1(!_),t=e[i++],r=e[i++],s=e[i++];try{if(u=cbb_jsvmp(r,i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v}),i=t+i,"-90_cbb"==u)return 
u}catch(b){if(v=b,"-90_cbb"==(u=cbb_jsvmp(f,i=t+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,args:g,argsList:o,duei:c,all:l,a7:v})))return u;w=10}finally{if("-90_cbb"==u)return u;if(w=10,"-90_cbb"==(u=cbb_jsvmp(r,i=r+i,i,c,g.length,1,{shuz:e,cbbb:z,allthis:h,argsList:o,args:g,duei:c,all:l,a7:v})))return u;w=10,i+=s}}else f=kr2(_),t=Md1(w),u=yy1(f=t<f),f=iV2(r),t=fb2(f),f=oe1(f=f<t);else if(M<92216)f=yA(t),t=nq2(r),null!=(r=V42(s)).variablePool?n(r,t,f):r[t]=f,f=sf2(r),f=K11(r),t=Gt1(t),f=_U(f<<=t);else{{for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,gJ2()):s.splice(0,0,d11());for(f=il2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}}f=vd1(z),t=h72(f),f=f<=t,_=qe(f)}else if(M<92506)if(M<92271)f=ps1(_),t=s41(u),v=kA(f*=t),f=kp2(r),t=eP1(f),f=id(f=f<t);else{{for(f in p=[],t=kX2(t))p.push(f);z.for_in_xh_cbb_list=p}f=up(s),t=e_2(_),f-=t,u=x8(f)}else _=T42(f),t=Fk2(void 0),f=uX2(r),t=iQ(s),_=Ig2(f^=t);else if(M<93283)M<92851?M<92686?M<92589?(f=i$2(r),t=e[i++],f?w=10:(i+=t,s=Pa2(f))):(f=sB2(u),t=nv(f),f=bU(t=ph2(f>>>=t)),t=rs(s),v=Nd2(f=f instanceof t)):M<92687?(f=hV(_),t=dz(u),v=jW2(f*=t),f=nb(t),t=OA2(f),f=b82(f+=t)):(f=I32(t),t=ex(f),f=kG(r=zv1(f=t===f)),t=yb2(s),_=Z32(f%=t)):M<93083?M<92852?(_=Fp(r),f=DT2(typeof _),f=yo1(r),t=wh2(r),t=aZ2(f=t<f)):(f=iq(t),t=hw2(f),r=KR(f/=t),_=ly1(r),s=n$2(!_)):(f=e[i++],t=l11(r),r=Ms(f),t[constantPool[f]]=r,f=D31(r),t=AL2(r),t=UT2(f=t<f));else if(M<93505)M<93429?M<93284?(f=fo2({}),f=uk(r),t=G6(s),_=H31(f^=t)):(f=OD2(_),t=xz(w),u=_P1(f=t<f),f=T82(t),t=rz(f),f=fB2(f=t-f)):M<93430?(f=L11(_),t=Dy2(u),f=J$(f=t!==f),f=Au1(r),t=t0(r),t=Ii(f=t<f)):(f=X11(t),t=dM2(f),f=V2(f+=t),f=Io1(_),t=vY(r),null!=(r=_X2(f)).variablePool?n(r,t,f):r[t]=f);else if(M<93637){if(!(M<93506))return void(t=Zl2(h));f=_O2(_),t=t5(w),u=$V2(f=t<f),f=wS2(t)}else t=p01(h),f=fE(t),t=NP2(f),r=_E2(f=t===f);else 
if(M<96776)if(M<95286)if(M<94555)if(M<94230)if(M<93875)if(M<93754)if(M<93692)f=te1(r),t=ZA(r),t=Hb2(f=t<f),_=yp2(f),t=f02(void 0);else for(f=z22(_),t=kY2(u),f=WJ(f=t!==f),f=c.CFf,t=0;t<f;t++)w=cr2(t),z[w]=o[t];else{if(!(M<93755))return f=j02(s),t=W5(f),void(t=D_2(f=t<=f));t=lo2([]),f=bb(t)}else if(M<94132)if(M<93876)f=Q11(t),t=pJ2(f),r=a82(f=t===f),f=XR(s),t=fj(f),t=gf(f=t<=f);else{f=$02(t),t=PJ2(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=xB2(f),f=AV2(t),t=P72(f),f=fP1(f=t-f)}else M<94133?(f=gl2(r),t=D52(t),f=TS(f<<=t),f=RH2(r),t=FY(t),f=g$(f<<=t)):(f=iz(_),t=m02(u),f=MK(f=t!==f),f=$M(u),t=e[i++],f?w=10:i+=t);else if(M<94392)if(M<94285)t=M<94231?(f=Nf(t),ti(f=e[i++])):(y=uj(f),null!=(P=BG(t)).variablePool?n(P,y,v):P[y]=f,NM2(h));else if(M<94286)for(f=$v(t),d=c.CFf,f=0;f<d;f++){let c=v=As1(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}else t=Il2(h),f=e[i++],t=KF(f);else M<94450?M<94393?(f=JW2(z),t=So1(f),f=Nm(_=f82(f=f<=t)),t=vY2(w),u=hQ(f=t<f)):(f=vG2(s),t=x62(f),t=nI2(f=t<=f),y=wl2(f),null!=(P=ml(t)).variablePool?n(P,y,v):P[y]=f):(f=hu2(r),t=zx(t),f=Y$2(f<<=t),f=uU2(t),t=h02(f),s=xK2(f=t==f));else 
if(M<94956)M<94831?M<94768?M<94556?(f=e[i++],t=Le2(r),r=QZ2(f),t[constantPool[f]]=r,f=yg(r),t=vq2(t),f=AI2(f<<=t)):(f=OH(u),t=FJ(f),t=pv(f>>>=t),f=aW(_),t=e[i++],f?(i+=t,v=Vb1(f)):w=10):f=(M<94769?(f=kQ2(u),t=UF(v),d=Ot2(f|=t),f=Z52(_),t=TO(u),f=t!==f,tc):(f=lg(t),t=Pg2(f),s=pA(f=t==f),f=p82(t),t=bh(f),f=t-f,yR))(f):M<94867?M<94832?(f=z71(t),f=r52(_),t=bq2(u),v=vR2(f*=t)):(f=IP1(t),t=d71(f),f=Wf(f+=t),f=R_(t),t=gn2(f),s=rj2(f=t==f)):(f=V3(r),t=kC2(s),_=Yr2(f>>=t),f=MH(r),t=B8(f),f=v31(f=f in t));else if(M<95188)if(M<95133)if(M<94957)f=i41(t),t=Dc1(f),r=sF(f=t===f),f=iA2(t);else{for(f=e[i++],r=Tr(_),g=[],t=0;t<f;t++)g.splice(0,0,kv2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=PK(s),f=kn1(_),t=C52(w),u=X82(f=t<f)}else t=M<95134?(f=Op2(r),t=be2(s),_=o$2(f^=t),_=c4(f),co1(void 0)):(f=K6(r),t=iR(s),_=HU(f^=t),eL2(h));else M<95227?M<95189?(_=Dn(f),t=TL2(void 0),t=$F2([])):(f=W22(),t=_X(),c41(f=t!=f),f=CA(t),t=UN(r),r=I7(f&=t)):(_=Om(r),f=ha(typeof _),f=CM(u),t=tB(v),d=NK2(f|=t));else if(M<96085)if(M<95770)if(M<95596)if(M<95559)t=M<95287?(f=Mo(u),t=W_(f),t=c$2(f>>>=t),f=eV2(u),t=NI(f),cf(f>>>=t)):(f=Pf(_),t=yR2(u),v=cI2(f*=t),f=e[i++],TP1(constantPool[f]));else if(M<95560)f=IE2(t),t=DW(f),f=U_1(f+=t);else{for(f in f=Sz2(r),t=O2(s),_=Qm(f>>=t),p=[],t=CC2(t))p.push(f);z.for_in_xh_cbb_list=p}else M<95683?t=M<95597?(y=uH2(f),null!=(P=C62(t)).variablePool?n(P,y,v):P[y]=f,f=ue1(u),t=vJ2(f),bS(f>>>=t)):(f=X4(t),t=vE2(f),r=mp(f=t===f),f=e[i++],Wv2(constantPool[f])):(f=WM(t),t=VE(f),r=ww(f=t===f),f=pL(_),t=yH2(u),f=I6(f=t!==f));else if(M<95989)if(M<95880)if(M<95771)f=ao1(r),t=Yf2(s),_=td(f%=t),f=ij2(r),t=UO2(s),_=WF(f^=t);else{for(f=cc2(z),t=OQ(f),_=hs(f=f<=t),z.for_in_xh_cbb_list=p,f=e[i++],r=xJ(_),g=[],t=0;t<f;t++)g.splice(0,0,Df2(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new 
r(...g),offnew=0,f=$h(s)}else M<95881?(f=G61(_),t=e21(u),f=C71(f=t!==f),_=sa1(r),s=f72(!_)):(f=Wa1(r),t=O02(s),_=sZ2(f>>=t),f=jn1(w),r=e[i++],f[constantPool[r]]+=1);else M<96080?M<95990?(f=Up(_),t=EN2(u),v=Ll1(f*=t),f=Du2(r),t=Gb(f),f=ji2(f=f<t)):(_=_J(r),s=hS(!_),_=vN(f),t=e0(void 0)):(f=FF2(_),t=d81(w),u=wO2(f=t<f),f=io2(w),r=e[i++],f[constantPool[r]]+=1);else if(M<96274)if(M<96172)if(M<96110)if(M<96086)f=ts1(r),t=t4(f),f=X91(f=f<t),f=nO(u),t=Mj2(f),t=rX(f>>>=t);else for(f=XM2(r),t=to1(s),_=On2(f^=t),f=c.CFf,t=0;t<f;t++)w=ik2(t),z[w]=o[t];else if(M<96111)f=DW2(),t=$e(),sp2(f=t!=f),f=tc1(_),t=y72(u),v=Ql1(f*=t);else{for(z.for_in_xh_cbb_list=p,f=e[i++],r=u82(_),g=[],t=0;t<f;t++)g.splice(0,0,bR(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=Zy2(s),f=WX2(r),t=ag(s),_=yT(f>>=t)}else if(M<96225)if(M<96173)f=cV2(w),r=e[i++],f[constantPool[r]]+=1,f=dg2(s),t=qr(_),u=Rm2(f-=t);else{for(z.for_in_xh_cbb_list=p,f=e[i++],r=o91(_),g=[],t=0;t<f;t++)g.splice(0,0,Ps1(u));offnew=1,s=r==RegExp?new RegExp(g[0],g[1]):new r(...g),offnew=0,f=Wc(s),f=yB2(t),t=r62(f),r=ja2(f/=t)}else _=rW(f),t=N11(void 0),f=eK2(t),t=v72(r),null!=(r=ie1(s)).variablePool?n(r,t,f):r[t]=f,f=P01(r);else if(M<96659)if(M<96638)if(M<96275)f=Lv2(t),r=e[i++],f?i+=r:w=9,f=fg(_),t=cp(u),f=bQ2(f=t!==f);else{for(d=c.CFf,f=0;f<d;f++){let c=v=un1(f);l[c]=function(){var b=new cshduei;if(u=(1==offnew?(offnew=0,(w={variablePool:{},arguments:arguments,zhili:[]}).__proto__=z,cltothis(w.variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili)):(cltothis((w={variablePool:{},arguments:arguments,zhili:[]}).variablePool,changlc[z.variablePool[c]].variablePool),cltothis(w,w.variablePool,1),cltothis(w.zhili,changlc[z.variablePool[c]].zhili),w.__proto__=z),cbb_jsvmp(w,b,0,w.zhili,arguments,this)),0!=b.CFf){for(var l=b.cf(f);;){1;{if(0==b.CFf)break;w=b.cf(f)}}return l}}}f=Zd(_),t=wD2(u),f=Pw(f=t!==f)}else 
M<96639?(t=Iw2([]),f=X42(r),t=I71(s),_=LO(f%=t)):(f=v_(t),t=F11(f),f=ca(f+=t),t=iC2([]));else M<96762?M<96660?(f=RR2(r),t=E72(f),r=delete t[f],f=re(r),f=rI(s),t=X$(_),u=th1(f-=t)):(f=nn2(t),t=lE2(f),r=vi2(f/=t),f=e[i++],t=rU2(r),r=Bh1(f),t[constantPool[f]]=r):(t=PW([]),f=Po(t),t=hC(r),null!=(r=Vd2(s)).variablePool?n(r,t,f):r[t]=f,f=v61(r));else if(M<98635)if(M<97837)if(M<97374)if(M<97127)M<96954?M<96777?(f=f71(s),t=ud1(_),u=VD2(f-=t),f=Lb1(t)):(f=$L2(t),t=Eg2(r),r=a6(f&=t),f=wD(_),t=Jw1(w),u=ZI(f=t<f)):M<96955?(f=qt1(u),t=vt(f),t=cS2(f>>>=t),f=Jo2(r),t=H7(t),f=xr2(f<<=t)):(f=Ns1(_),t=Dg(w),u=Ko2(f=t<f),f=e[i++],t=a_2(constantPool[f]));else if(M<97254)if(M<97128){for(f=At(z),t=I51(f),_=Cb(f=f<=t),f=e[i++],r=T(_),g=[],t=0;t<f;t++)g.splice(0,0,Zy1(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=DX(s)}else f=o_2(r),t=Aa(f),f=N41(f=f<t),f=A02(t),t=jm2(f),r=JX(f=t===f);else if(M<97255){for(f=2*e[i++],r=[],s=[],t=0;t<f;t++)t<f/2?r.splice(0,0,Pl1()):s.splice(0,0,pb());for(f=Om2(t),t=0;t<r.length;t++){if(s[t]==f){i+=r[t];break}if(null==s[t]){i+=r[t];break}w=10}f=Jd2(),t=fP2(),o2(f=t!=f)}else _=JA(r),f=j1(~_),f=yX2(t),t=_w1(f),f=Ph1(f=t-f);else M<97596?M<97437?M<97375?(f=md1(t),f=Xb(r),t=YD2(s),null!=(r=Fq(_)).variablePool?n(r,t,f):r[t]=f):(f=is1(_),t=e[i++],f?(i+=t,v=$S(f)):w=10,f=bf(r),t=mG2(s),_=k0(f%=t)):f=M<97438?(f=ea2(t),t=ms2(r),r=ib(f&=t),f=a$2(_),t=_S2(u),uE(f=t!==f)):(f=KB2(),t=sl1(),lZ2(f=t!=f),f=ul(t),t=m81(r),null!=(r=lm2(s)).variablePool?n(r,t,f):r[t]=f,ac2(r)):M<97783?M<97597?(f=WG(_),t=hp(w),u=NR2(f=t<f),f=eU(t),t=KE2(f),r=pp(f/=t)):(f=xH2(r),t=k41(s),f=kv1(_=_d2(f%=t)),t=cr1(w),u=sW(f=t<f)):(f=U_(t),t=qP1(r),r=pa1(f&=t));else if(M<98092)if(M<98009)if(M<97935)if(M<97838)_=$82(r),f=Mf(~_),t=am2(h);else{for(f in 
f=Yf(r),t=Cg2(t),f=_0(f<<=t),p=[],t=km(t))p.push(f);z.for_in_xh_cbb_list=p}else M<97936?(f=go(s),t=hv2(f),t=yd(f=t<=f),f=Sp(s),t=GW(f),t=V51(f=t<=f)):(f=Vc(u),t=T0(v),d=hB(f|=t),f=c12(_),t=Fo(u),v=sz(f*=t));else M<98029?M<98010?(f=W9(r),(t=vh1(f)).push(f),f=au1(t),f=c81(t),t=tN(r),r=v21(f&=t)):(f=p32(z),t=rD2(f),_=Rt(f=f<=t),f=tN2(r),t=w3(f),r=delete t[f],f=tQ(r)):(f=rq(r),t=q82(t),f=bt(f<<=t),f=FU(_),t=e[i++],f?(i+=t,v=rY2(f)):w=10);else if(M<98359)if(M<98239)M<98093?(f=Ag(r),t=j51(s),_=pK2(f%=t),f=qq2(t),t=oa1(f),s=dH2(f=t==f)):(f=F7(t),f=kG2(r),t=Mz2(s),_=Hd(f^=t));else if(M<98240){{for(f=e[i++],r=bR2(_),g=[],t=0;t<f;t++)g.splice(0,0,US2(u));s=r==window.setTimeout?setTimeout(...g):r==window.atob?atob(...g):r==window.clearInterval?clearInterval(...g):r==window.setInterval?setInterval(...g):r==window.RegExp?RegExp(...g):r==window.alert?alert(...g):r.apply(l,g),t=iT2(s)}f=Jr(r),t=Iu1(r),f=t<f,t=o02(f)}else f=PE2(),t=Xd1(),qo(f=t!=f),f=X3(_),t=fI2(u),v=lA(f*=t);else M<98535?M<98360?(f=q7(t),t=L91(f),s=TW(f=t==f),_=P4(r),s=EZ2(!_)):(f=U2(s),t=H01(_),u=V6(f-=t),t=kp([])):(f=ch1(_),t=lN2(w),u=Zj(f=t<f),f=c42(t),t=lx(f),r=An2(f=t===f));else if(M<99362)if(M<99128)if(M<98870)if(M<98643)M<98636?(f=vI(s),t=Z82(f),t=se(f=t<=f),f=rS(r),t=e[i++],f?w=10:(i+=t,s=f5(f))):(f=HK2(r),t=p52(r),t=DN2(f=t<f),f=az(_),t=uw2(u),f=wq2(f=t!==f));else if(M<98644)f=jQ(r),t=RL(s),null!=(r=Wy2(_)).variablePool?n(r,t,f):r[t]=f;else{for(f=c.CFf,t=0;t<f;t++)w=lw(t),z[w]=o[t];t=_S(h)}else M<98991?M<98871?(f=gP1(r),t=e[i++],f?w=10:(i+=t,s=wP1(f)),f=Cw(_),t=tD2(w),u=BL(f=t<f)):(f=PG2({}),t=Xl([])):(f=T51(z),t=gE2(f),_=_5(f=f<=t),f=wT2(r),t=km2(s),_=of(f^=t));else if(M<99248)if(M<99158)t=(M<99129?(_=TH2(t),t=TR(-_),AX2):(f=Dz(t),t=Yc2(f),r=Ty2(f=t===f),Lx))([]);else{if(!(M<99159))return;f=ew1(r),t=z82(s),_=PT(f%=t),f=lY(t),t=xc2(f),f=Fl2(f=t-f)}else if(M<99322)if(M<99249){{for(f in p=[],t=Q01(t))p.push(f);z.for_in_xh_cbb_list=p}f=IV2(t)}else{throw f=Ke1(t);f=po2(),t=w1(),f=t!=f,Lg2(f)}else 
_=Hw2(f),t=TF(void 0),f=D0(r),t=x6(t),f=I4(f<<=t);else if(M<99730)if(M<99518)if(M<99383)if(M<99363){f=Uy(t),t=T6(f),f+=t,f=w6(f);{f=oK(t),t=t7(r);try{f=t[f]}catch(b){f=window[f]}l=t,f=Ii1(f)}}else t=B5([]),f=d21(t),t=gk(r),null!=(r=N(s)).variablePool?n(r,t,f):r[t]=f,f=sY2(r);else M<99384?(_=vv(r),s=p_(!_),f=Gi(u),t=bd(f),t=G2(f>>>=t)):f=Fb2({});else M<99693?M<99519?(f=XY(_),t=ru(u),f=uV(f=t!==f),f=t82(t),t=dd1(f),r=lf2(f=t===f)):(f=or(u),t=Qb1(v),d=WQ(f|=t),f=Fr2(t),t=Tc2(f),s=tH(f=t==f)):(f=e[i++],i+=f,_=Sn(f),t=_B(void 0));else M<99876?M<99781?M<99731?(t=ui(h),f=e[i++],t=Sm(r),r=DG2(f),t[constantPool[f]]=r):(f=e[i++],t=R42(f),f=$c1(r),t=FR(r),t=wG(f=t<f)):M<99782?(f=g0(),t=k12(),Tb2(f=t!=f),f=Ay(z),t=Lw2(f),_=vU(f=f<=t)):(f=e[i++],t=iH(f),f=WP2(_),t=tK2(u),f=vc2(f=t!==f)):M<99954?M<99877?(f=s12(t),t=O81(f),r=vD(f=t===f),f=kW(t),t=Ak(f),r=jh1(f/=t)):(f=SI2(t),t=$x(r),f=bH2(r=Ci2(f&=t)),t=w32(s),_=Cr1(f^=t)):(f=FO(s),t=Va2(f),t=bs2(f=t<=f),f=Sy1(r),t=Cb2(s),_=Su(f%=t))}}window.offnew=0,window.variablePool={},window.zhili=[],cltothis(window.variablePool,changlc.awcbb_yhh_fun0.variablePool),cltothis(window.zhili,changlc.awcbb_yhh_fun0.zhili),cbb_jsvmp(window,new cshduei,0,changlc.awcbb_yhh_fun0.zhili);
27182812/ChatGLM-LLaMA-chinese-insturct
119,839
src/transformers/models/jukebox/modeling_jukebox.py
# coding=utf-8 # Copyright 2022 The OpenAI Team Authors and HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """PyTorch Jukebox model.""" import math import os from typing import List, Optional, Tuple import numpy as np import torch import torch.nn.functional as F from torch import nn from torch.nn import LayerNorm as FusedLayerNorm from ...activations import ACT2FN from ...modeling_utils import PreTrainedModel from ...utils import add_start_docstrings, logging from ...utils.logging import tqdm from .configuration_jukebox import ATTENTION_PATTERNS, JukeboxConfig, JukeboxPriorConfig, JukeboxVQVAEConfig logger = logging.get_logger(__name__) JUKEBOX_PRETRAINED_MODEL_ARCHIVE_LIST = [ "openai/jukebox-1b-lyrics", "openai/jukebox-5b-lyrics", # See all Jukebox models at https://huggingface.co/models?filter=jukebox ] def filter_logits(logits, top_k=0, top_p=0.0, filter_value=-float("Inf")): """ Filter a distribution of logits using top-k and/or nucleus (top-p) filtering Args: logits (`torch.Tensor`): logits distribution shape (vocabulary size) top_k (`int`, *optional*, defaults to 0): When `top_k >0` keep only top key tokens with highest probability (top-k filtering). top_p (`int`, *optional*, defaults to 0): When `top_p>0.0` keep the top tokens with cumulative probability >= `top_p` (nucleus filtering). 
""" logits = logits.clone() top_k = min(top_k, logits.size(-1)) # Safety check if top_k > 0: # Remove all tokens with a probability less than the last token of the top-k indices_to_remove = logits < torch.topk(logits, top_k, dim=-1)[0][..., -1:] logits[indices_to_remove] = filter_value if top_p > 0.0: sorted_logits, sorted_indices = torch.sort(logits, descending=True, dim=-1) cumulative_probs = torch.cumsum(F.softmax(sorted_logits, dim=-1), dim=-1) # Remove tokens with cumulative probability above the threshold sorted_indices_to_remove = cumulative_probs > top_p # Shift the indices to the right to keep also the first token above the threshold sorted_indices_to_remove[..., 1:] = sorted_indices_to_remove[..., :-1].clone() sorted_indices_to_remove[..., 0] = 0 # indices_to_remove = sorted_indices[sorted_indices_to_remove] indices_to_remove = torch.zeros_like(logits, dtype=torch.bool).scatter_( dim=-1, index=sorted_indices, src=sorted_indices_to_remove ) logits[indices_to_remove] = filter_value return logits def get_relevant_lyric_tokens(full_tokens, max_n_lyric_tokens, total_length, offset, duration): """ Extract only the relevant tokens based on the character position. A total of `max_n_lyric_tokens` tokens will be returned. If the provided token sequence is smaller, it will be padded, otherwise, only characters ranging from the midpoint - `max_n_lyric_tokens//2` to the midpoint + `max_n_lyric_tokens//2` will be returned. This *focuses* on the most relevant tokens (in time) for the sequence. Args: full_tokens (`List[int]`): List containing the token ids of the entire lyrics. total_length (`int`): Total expected length of the music (not all of it is generated, see duration), in samples. offset (`int`): Starting sample in the music. If the offset is greater than 0, the lyrics will be shifted take that into account duration (`int`): Expected duration of the generated music, in samples. 
The duration has to be smaller than the total length, which represent the overall length of the signal, """ full_tokens = full_tokens[0] if len(full_tokens) < max_n_lyric_tokens: tokens = torch.cat( [torch.zeros(max_n_lyric_tokens - len(full_tokens), dtype=torch.long).to(full_tokens.device), full_tokens] ) indices = [-1] * (max_n_lyric_tokens - len(full_tokens)) + list(range(0, len(full_tokens))) else: midpoint = int(len(full_tokens) * (offset + duration / 2.0) / total_length) midpoint = min(max(midpoint, max_n_lyric_tokens // 2), len(full_tokens) - max_n_lyric_tokens // 2) tokens = full_tokens[midpoint - max_n_lyric_tokens // 2 : midpoint + max_n_lyric_tokens // 2] indices = list(range(midpoint - max_n_lyric_tokens // 2, midpoint + max_n_lyric_tokens // 2)) return tokens.unsqueeze(dim=0), indices # Break total_length into hops/windows of size n_ctx separated by hop_length def get_starts(total_length, n_ctx, hop_length): starts = [] for start in range(0, total_length - n_ctx + hop_length, hop_length): if start + n_ctx >= total_length: # Last hop could be smaller, we make it n_ctx to maximise context start = total_length - n_ctx starts.append(start) return starts def get_alignment(music_tokens, labels, prior, config): level = prior.levels - 1 # Top level used n_ctx = prior.n_ctx tokens = music_tokens[level] batch_size, total_length = tokens.shape[0], tokens.shape[1] if total_length < n_ctx: padding_length = n_ctx - total_length tokens = torch.cat( [tokens, torch.zeros(batch_size, n_ctx - total_length, dtype=tokens.dtype, device=tokens.device)], dim=1 ) total_length = tokens.shape[1] else: padding_length = 0 hop_length = int(config.hop_fraction[-level - 1] * prior.n_ctx) alignment_head, alignment_layer = config.prior_alignment_head[0], config.prior_alignment_layer[0] attn_layers = {alignment_layer} alignment_hops = {} indices_hops = {} for start in tqdm(get_starts(total_length, n_ctx, hop_length), desc="Computing lyric to music alignment "): end = start + n_ctx # set 
metadata offset, sample_length and lyrics tokens metadata, indices_hop = prior.get_metadata(labels, start, config.sample_length, get_indices=True, offset=0) tokens_bs = torch.chunk(tokens, batch_size, dim=0) metadata_bs = torch.chunk(metadata, batch_size, dim=0) w_hops = [] for tokens_i, metadata_i in zip(tokens_bs, metadata_bs): w_hop = prior.forward_tokens(tokens_i[:, start:end], [], metadata_i, get_attn_weights=attn_layers) w_hops.append(w_hop[0][:, alignment_head]) del w_hop weights = torch.cat(w_hops, dim=0) del w_hops alignment_hop = weights.float().cpu().numpy() del weights # alignment_hop has shape (bs, n_ctx, nb_relevant_lyric_tokens) # indices_hop is a list of len=bs, each entry of len hps.nb_relevant_lyric_tokens indices_hops[start] = indices_hop alignment_hops[start] = alignment_hop # Combine attn for each hop into attn for full range # Use indices to place them into correct place for corresponding source tokens alignments = [] for item in range(batch_size): # Note each item has different length lyrics full_tokens = labels[0, 3:] alignment = np.zeros((total_length, len(full_tokens) + 1)) for start in reversed(get_starts(total_length, n_ctx, hop_length)): end = start + n_ctx alignment_hop = alignment_hops[start][item] indices = indices_hops[start][item] alignment[start:end, indices] = alignment_hop alignment = alignment[: total_length - padding_length, :-1] # remove token padding, and last lyric index alignments.append(alignment) return alignments def save_temp_audio(fname, lvl, metas, aud): aud = torch.clamp(aud, -1, 1).cpu().numpy() for i in list(range(aud.shape[0])): if metas is not None: artists, genres, lyrics = list(metas)[i].values() path = f"{fname}/lvl_{lvl}-{artists}-{genres}-{lyrics[:5]}-{i}" np.save(path, aud[i]) else: np.save(f"{fname}/lvl_{lvl}-sample-{i}", aud[i]) def get_mask(mask, query_length, key_value_length, blocks, spread, device, sample, sample_t): # returns a mask of shape 1 x 1 x query_length x key_value_length or None if masking 
is not needed. if mask is None or query_length == 1: return None offset = sample_t - query_length if sample else max(key_value_length - query_length, 0) if mask == "autoregressive": # Masked dense mask = torch.ones(query_length, key_value_length, device=device).tril(offset) elif mask == "summary": # Masked summary mask = torch.ones(query_length, query_length, device=device).tril() mask = torch.ones(query_length, query_length, device=device).tril() mask = mask.view(query_length, blocks, query_length // blocks)[:, :-1, -key_value_length // blocks :] mask = ( torch.nn.functional.pad( mask, (0, 0, 1, 0), value=1, ) .contiguous() .view(query_length, key_value_length) ) elif mask == "prime": mask = torch.ones(query_length, key_value_length, device=device).tril(offset) return mask.view(1, 1, query_length, key_value_length) class JukeboxConv1D(nn.Module): def __init__(self, input_width, output_width): super().__init__() self.input_width = input_width self.output_width = output_width weight = torch.empty(input_width, output_width) bias = torch.zeros(output_width) self.weight = nn.Parameter(weight) self.bias = nn.Parameter(bias) def forward(self, hidden_states): size_out = (*hidden_states.size()[:-1], self.output_width) hidden_states = torch.addmm( self.bias.type_as(hidden_states), hidden_states.view(-1, hidden_states.size(-1)), self.weight.type_as(hidden_states), ) hidden_states = hidden_states.view(*size_out) return hidden_states class JukeboxResConv1DBlock(nn.Module): def __init__(self, config, conv_width, depth=1, res_scale=1.0): super().__init__() hidden_dim = config.res_convolution_multiplier * conv_width dilation = config.res_dilation_growth_rate**depth padding = dilation self.res_scale = res_scale self.activation = nn.ReLU() self.conv1d_1 = nn.Conv1d(conv_width, hidden_dim, 3, 1, padding, dilation) self.conv1d_2 = nn.Conv1d(hidden_dim, conv_width, 1, 1, 0) def forward(self, hidden_states): residuals = hidden_states hidden_states = self.activation(hidden_states) 
hidden_states = self.conv1d_1(hidden_states) hidden_states = self.activation(hidden_states) hidden_states = self.conv1d_2(hidden_states) return residuals + self.res_scale * hidden_states class JukeboxResnet1D(nn.Module): def __init__(self, config, conv_width, n_depth, reverse_dilation=False): super().__init__() self.dilation_cycle = config.res_dilation_cycle res_scale = 1.0 if not config.conv_res_scale else 1.0 / math.sqrt(n_depth) blocks = [] for depth in range(n_depth): block_depth = depth if self.dilation_cycle is None else depth % self.dilation_cycle blocks.append(JukeboxResConv1DBlock(config, conv_width, block_depth, res_scale)) if reverse_dilation: blocks = blocks[::-1] self.resnet_block = nn.ModuleList(blocks) def forward(self, hidden_states): for block in self.resnet_block: hidden_states = block(hidden_states) return hidden_states class JukeboxEncoderConvBlock(nn.Module): def __init__(self, config, embed_dim, hidden_dim, depth, down_t, stride_t): super().__init__() blocks = [] filter_t = stride_t * 2 pad_t = stride_t // 2 if down_t > 0: for i in range(down_t): blocks.append(nn.Conv1d(embed_dim if i == 0 else hidden_dim, hidden_dim, filter_t, stride_t, pad_t)) blocks.append(JukeboxResnet1D(config, hidden_dim, depth)) self.proj_out = nn.Conv1d(hidden_dim, config.embed_dim, 3, 1, 1) self.downsample_block = nn.ModuleList(blocks) def forward(self, hidden_states): for block in self.downsample_block: hidden_states = block(hidden_states) hidden_states = self.proj_out(hidden_states) return hidden_states class JukeboxEncoder(nn.Module): def __init__(self, config, width, depth, levels, downs_t, strides_t): super().__init__() self.levels = levels self.level_blocks = nn.ModuleList() iterator = zip(list(range(self.levels)), downs_t, strides_t) for i, down_t, stride_t in iterator: self.level_blocks.append( JukeboxEncoderConvBlock( config, config.conv_input_shape if i == 0 else config.embed_dim, width, depth, down_t, stride_t ) ) def forward(self, hidden_states): 
all_hidden_states = [] # 64, 32, ... for level in range(self.levels): level_block = self.level_blocks[level] hidden_states = level_block(hidden_states) all_hidden_states.append(hidden_states) return all_hidden_states class JukeboxDecoderConvBock(nn.Module): def __init__(self, config, embed_dim, hidden_dim, depth, down_t, stride_t, reverse_dilation=True): self.embed_dim = embed_dim self.hidden_dim = hidden_dim super().__init__() blocks = [] if down_t > 0: filter_t = stride_t * 2 pad_t = stride_t // 2 self.proj_in = nn.Conv1d(embed_dim, hidden_dim, 3, 1, 1) for i in range(down_t): blocks.append(JukeboxResnet1D(config, hidden_dim, depth, reverse_dilation)) blocks.append( nn.ConvTranspose1d( hidden_dim, hidden_dim if i < down_t - 1 else embed_dim, filter_t, stride_t, pad_t ) ) self.upsample_block = nn.ModuleList(blocks) def forward(self, hidden_states): hidden_states = self.proj_in(hidden_states) for block in self.upsample_block: hidden_states = block(hidden_states) return hidden_states class JukeboxDecoder(nn.Module): def __init__(self, config, hidden_dim, depth, levels, downs_t, strides_t): super().__init__() self.levels = levels self.level_blocks = nn.ModuleList() for level, down_t, stride_t in zip(list(range(self.levels)), downs_t, strides_t): self.level_blocks.append( JukeboxDecoderConvBock(config, config.embed_dim, hidden_dim, depth, down_t, stride_t) ) self.out = nn.Conv1d(config.embed_dim, config.conv_input_shape, 3, 1, 1) def forward(self, hidden_states, all_levels=True): hidden_state = hidden_states[-1] # 32, 64 ... 
for level in reversed(range(self.levels)): level_block = self.level_blocks[level] hidden_state = level_block(hidden_state) if level != 0 and all_levels: hidden_state = hidden_state + hidden_states[level - 1] hidden_state = self.out(hidden_state) return hidden_state class JukeboxBottleneckBlock(nn.Module): def __init__(self, config: JukeboxVQVAEConfig): super().__init__() self.nb_discrete_codes = config.nb_discrete_codes self.codebook_width = config.embed_dim self.mu = config.lmu self.threshold = 1.0 self.init = False self.codebook_sum = None self.codebook_elem = None self.register_buffer("codebook", torch.zeros(self.nb_discrete_codes, self.codebook_width)) def _tile(self, hidden_states): dim, embed_width = hidden_states.shape if dim < self.nb_discrete_codes: n_repeats = (self.nb_discrete_codes + dim - 1) // dim std = 0.01 / np.sqrt(embed_width) hidden_states = hidden_states.repeat(n_repeats, 1) hidden_states = hidden_states + torch.randn_like(hidden_states) * std return hidden_states def init_codebook(self, hidden_states): nb_discrete_codes = self.nb_discrete_codes self.init = True codes = self._tile(hidden_states) self.codebook = codes[torch.randperm(codes.shape[0])][:nb_discrete_codes] self.codebook_sum = self.codebook self.codebook_elem = torch.ones(nb_discrete_codes, device=self.codebook.device) def update_codebook(self, hidden_states, latent_states): mu, codebook_width, nb_discrete_codes = self.mu, self.codebook_width, self.nb_discrete_codes with torch.no_grad(): # Calculate new centres # nb_discrete_codes, batch_size * seq_length latent_states_onehot = torch.zeros(nb_discrete_codes, hidden_states.shape[0], device=hidden_states.device) latent_states_onehot.scatter_(0, latent_states.view(1, hidden_states.shape[0]), 1) _codebook_sum = torch.matmul(latent_states_onehot, hidden_states) _codebook_elem = latent_states_onehot.sum(dim=-1) # nb_discrete_codes codes = self._tile(hidden_states) _random_codebook = codes[torch.randperm(codes.shape[0])][:nb_discrete_codes] 
# Update centres old_codebook = self.codebook self.codebook_sum = mu * self.codebook_sum + (1.0 - mu) * _codebook_sum self.codebook_elem = mu * self.codebook_elem + (1.0 - mu) * _codebook_elem # nb_discrete_codes usage = (self.codebook_elem.view(nb_discrete_codes, 1) >= self.threshold).float() norm_code = self.codebook_sum.view(nb_discrete_codes, codebook_width) / self.codebook_elem.view( nb_discrete_codes, 1 ) self.codebook = usage * (norm_code) + (1 - usage) * _random_codebook _codebook_prob = _codebook_elem / torch.sum(_codebook_elem) # prob of each bin entropy = -torch.sum(_codebook_prob * torch.log(_codebook_prob + 1e-8)) # entropy ie how diverse used_curr = (_codebook_elem >= self.threshold).sum() usage = torch.sum(usage) dk = torch.norm(self.codebook - old_codebook) / np.sqrt(np.prod(old_codebook.shape)) return {"entropy": entropy, "used_curr": used_curr, "usage": usage, "dk": dk} def preprocess(self, hidden_states): hidden_states = hidden_states.permute(0, 2, 1).contiguous() hidden_states = hidden_states.view(-1, hidden_states.shape[-1]) if hidden_states.shape[-1] == self.codebook_width: prenorm = torch.norm(hidden_states - torch.mean(hidden_states)) / np.sqrt(np.prod(hidden_states.shape)) elif hidden_states.shape[-1] == 2 * self.codebook_width: x1, x2 = hidden_states[..., : self.codebook_width], hidden_states[..., self.codebook_width :] prenorm = (torch.norm(x1 - torch.mean(x1)) / np.sqrt(np.prod(x1.shape))) + ( torch.norm(x2 - torch.mean(x2)) / np.sqrt(np.prod(x2.shape)) ) # Normalise hidden_states = x1 + x2 return hidden_states, prenorm def postprocess(self, latent_states, dequantised_states, x_shape): batch_size, time = x_shape dequantised_states = dequantised_states.view(batch_size, time, -1).permute(0, 2, 1).contiguous() latent_states = latent_states.view(batch_size, time) return latent_states, dequantised_states def quantise(self, latent_states): # Calculate latent code latent_states codebook_weights = self.codebook.t() distance = ( 
torch.sum(latent_states**2, dim=-1, keepdim=True) - 2 * torch.matmul(latent_states, codebook_weights) + torch.sum(codebook_weights**2, dim=0, keepdim=True) ) # (batch_size * latent_states , codebook_weights) min_distance, music_tokens = torch.min(distance, dim=-1) fit = torch.mean(min_distance) return music_tokens, fit def dequantise(self, music_tokens): dequantised_states = F.embedding(music_tokens, self.codebook) return dequantised_states def encode(self, latent_states): samples, _, seq_len = latent_states.shape # Preprocess. latent_states, _ = self.preprocess(latent_states) # Quantise music_tokens, _ = self.quantise(latent_states) # Postprocess. music_tokens = music_tokens.view(samples, seq_len) return music_tokens def decode(self, music_tokens): samples, seq_len = music_tokens.shape # Dequantise dequantised_states = self.dequantise(music_tokens) # Postprocess dequantised_states = ( dequantised_states.view(samples, seq_len, self.codebook_width).permute(0, 2, 1).contiguous() ) return dequantised_states def forward(self, hidden_states, update_codebook=True): samples, _, seq_len = hidden_states.shape # Preprocess hidden_states, prenorm = self.preprocess(hidden_states) # Init codebook if not inited if update_codebook and not self.init: self.init_codebook(hidden_states) # Quantise and dequantise through bottleneck music_tokens, fit = self.quantise(hidden_states) dequantised_states = self.dequantise(music_tokens) # Update embeddings if update_codebook: update_metrics = self.update_codebook(hidden_states, music_tokens) else: update_metrics = {} # Loss commit_loss = torch.norm(dequantised_states.detach() - hidden_states) ** 2 / np.prod(hidden_states.shape) # Passthrough dequantised_states = hidden_states + (dequantised_states - hidden_states).detach() # Postprocess music_tokens, dequantised_states = self.postprocess(music_tokens, dequantised_states, (samples, seq_len)) return music_tokens, dequantised_states, commit_loss, dict(fit=fit, pn=prenorm, **update_metrics) 
class JukeboxBottleneck(nn.Module): def __init__(self, config, levels): super().__init__() self.levels = levels self.level_blocks = nn.ModuleList() for level in range(self.levels): self.level_blocks.append(JukeboxBottleneckBlock(config)) def encode(self, raw_audio): music_tokens = [ level_block.encode(hidden_states) for (level_block, hidden_states) in zip(self.level_blocks, raw_audio) ] return music_tokens def decode(self, music_tokens, start_level=0, end_level=None): if end_level is None: end_level = self.levels quantised_audio = [ level_block.decode(z) for (level_block, z) in zip(self.level_blocks[start_level:end_level], music_tokens) ] return quantised_audio def forward(self, input_audio): music_tokens, quantised_states, commit_losses, metrics = [], [], [], [] for level in range(self.levels): level_block = self.level_blocks[-level - 1] hidden_states = input_audio[level] sampled_tokens, quantised_state, commit_loss, metric = level_block( hidden_states, update_codebook=self.training ) music_tokens.append(sampled_tokens) if not self.training: # Be extra paranoid and make sure the encoder weights can't # change from straight-through estimator quantised_state = quantised_state.detach() quantised_states.append(quantised_state) commit_losses.append(commit_loss) if self.training: metrics.append(metric) return music_tokens, quantised_states, commit_losses, metrics JUKEBOX_START_DOCSTRING = r""" This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config (`JukeboxConfig`): Model configuration class with all the parameters of the model. 
Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ @add_start_docstrings( """The Hierarchical VQ-VAE model used in Jukebox. This model follows the Hierarchical VQVAE paper from [Will Williams, Sam Ringer, Tom Ash, John Hughes, David MacLeod, Jamie Dougherty](https://arxiv.org/abs/2002.08111). """, JUKEBOX_START_DOCSTRING, ) class JukeboxVQVAE(PreTrainedModel): config_class = JukeboxVQVAEConfig base_model_prefix = "vqvae" _keys_to_ignore_on_load_unexpected = [r"priors"] def _init_weights(self, module): if isinstance(module, nn.Embedding): # embed_tokens module.weight.data.normal_(mean=0.0, std=0.02 * self.config.init_scale) elif isinstance(module, JukeboxConv1D): if self.config.zero_out: module.weight.data.zero_() else: module.weight.data.normal_(mean=0.0, std=0.02 * self.config.init_scale) elif isinstance(module, JukeboxResConv1DBlock) and self.config.zero_out: module.conv1d_2.weight.data.zero_() module.conv1d_2.bias.data.zero_() if isinstance(module, nn.LayerNorm): module.bias.data.zero_() module.weight.data.fill_(1.0) if isinstance(module, nn.Linear) and module.bias is not None: module.bias.data.zero_() def __init__(self, config: JukeboxVQVAEConfig): super().__init__(config) downs_t = config.res_downs_t strides_t = config.res_strides_t if not config.sample_length: downsamples = [stride**down for stride, down in zip(strides_t, downs_t)] top_raw_to_tokens = np.prod(downsamples) config.sample_length = ( config.sample_length_in_seconds * config.sampling_rate // top_raw_to_tokens ) * top_raw_to_tokens config.sample_length = config.sample_length.astype(int) self.nb_discrete_codes = config.nb_discrete_codes self.commit = config.commit self.sample_length = config.sample_length self.downsamples = [stride**down for stride, down in zip(strides_t, downs_t)] self.hop_lengths = np.cumprod(self.downsamples) self.levels = levels = 
config.levels self.music_tokens_shapes = [ (int(self.sample_length // self.hop_lengths[-level - 1])) for level in range(levels) ] self.multipliers = config.multipliers if config.multipliers is not None else [1] * levels self.encoders = nn.ModuleList() self.decoders = nn.ModuleList() for level in range(levels): width = config.res_conv_width * self.multipliers[level] depth = config.res_conv_depth * self.multipliers[level] self.encoders.append( JukeboxEncoder(config, width, depth, level + 1, downs_t[: level + 1], strides_t[: level + 1]) ) self.decoders.append( JukeboxDecoder(config, width, depth, level + 1, downs_t[: level + 1], strides_t[: level + 1]) ) self.bottleneck = JukeboxBottleneck(config, levels) def _decode(self, music_tokens, start_level=0, end_level=None): # Decode if end_level is None: end_level = self.levels latent_states = self.bottleneck.decode(music_tokens, start_level=start_level, end_level=end_level) # Use only lowest level decoder, dequantised_state = self.decoders[start_level], latent_states[0:1] dequantised_state = decoder(dequantised_state, all_levels=False) dequantised_state = dequantised_state.permute(0, 2, 1) return dequantised_state def decode(self, music_tokens, start_level=0, end_level=None, bs_chunks=1) -> torch.Tensor: """ Transforms the input `music_tokens` to their `raw_audio` representation. Args: music_tokens (`torch.LongTensor`): Tensor of music tokens which will be decoded to raw audio by using the codebook. Each music token should be an index to a corresponding `code` vector in the codebook. start_level (`int`, *optional*): Level at which the decoding process will start. Default to 0. end_level (`int`, *optional*): Level at which the decoding process will start. Default to None. bs_chunks (int, *optional*): Number of chunks to process at the same time. 
""" token_chunks = [torch.chunk(token, bs_chunks, dim=0) for token in music_tokens] dequantised_states = [] for i in range(bs_chunks): music_tokens_i = [chunks[i] for chunks in token_chunks] dequantised_state = self._decode(music_tokens_i, start_level=start_level, end_level=end_level) dequantised_states.append(dequantised_state) return torch.cat(dequantised_states, dim=0) def _encode(self, raw_audio, start_level=0, end_level=None): # Encode if end_level is None: end_level = self.levels input_audio = raw_audio.permute(0, 2, 1).float() latent_states = [] for level in range(self.levels): encoder = self.encoders[level] latent_state = encoder(input_audio) latent_states.append(latent_state[-1]) music_tokens = self.bottleneck.encode(latent_states) return music_tokens[start_level:end_level] def encode(self, input_audio, start_level=0, end_level=None, bs_chunks=1): """ Transforms the `input_audio` to a discrete representation made out of `music_tokens`. Args: input_audio (`torch.Tensor`): Raw audio which will be encoded to its discrete representation using the codebook. The closest `code` form the codebook will be computed for each sequence of samples. start_level (`int`, *optional*, defaults to 0): Level at which the encoding process will start. Default to 0. end_level (`int`, *optional*): Level at which the encoding process will start. Default to None. bs_chunks (int, *optional*, defaults to 1): Number of chunks of raw audio to process at the same time. 
""" audio_chunks = torch.chunk(input_audio, bs_chunks, dim=0) music_tokens_list = [] for chunk_i in audio_chunks: music_tokens_i = self._encode(chunk_i, start_level=start_level, end_level=end_level) music_tokens_list.append(music_tokens_i) music_tokens = [torch.cat(music_tokens_level, dim=0) for music_tokens_level in zip(*music_tokens_list)] return music_tokens def sample(self, n_samples): music_tokens = [ torch.randint(0, self.nb_discrete_codes, size=(n_samples, *music_tokens_shape), device="cpu") for music_tokens_shape in self.music_tokens_shapes ] return self.decode(music_tokens) def forward(self, raw_audio: torch.FloatTensor) -> Tuple[torch.Tensor, torch.Tensor]: """ Forward pass of the VQ-VAE, encodes the `raw_audio` to latent states, which are then decoded for each level. The commit loss, which ensure that the encoder's computed embeddings are close to the codebook vectors, is computed. Args: raw_audio (`torch.FloatTensor`): Audio input which will be encoded and decoded. Returns: `Tuple[torch.Tensor, torch.Tensor]` Example: ```python >>> from transformers import JukeboxVQVAE, set_seed >>> import torch >>> model = JukeboxVQVAE.from_pretrained("openai/jukebox-1b-lyrics").eval() >>> set_seed(0) >>> zs = [torch.randint(100, (4, 1))] >>> model.decode(zs).shape torch.Size([4, 8, 1]) ``` """ # Encode/Decode input_audio = raw_audio.permute(0, 2, 1).float() latent_states = [] for level in range(self.levels): encoder = self.encoders[level] latent_state = encoder(input_audio) latent_states.append(latent_state[-1]) _, music_tokens, commit_losses, _ = self.bottleneck(latent_states) dequantised_states = [] for level in range(self.levels): decoder = self.decoders[level] dequantised_state = decoder(music_tokens[level : level + 1], all_levels=False) dequantised_states.append(dequantised_state.permute(0, 2, 1)) commit_loss = sum(commit_losses) loss = self.commit * commit_loss return dequantised_states, loss class JukeboxMLP(nn.Module): def __init__(self, config): # a single 
channel is always used in original code super().__init__() embed_dim = config.hidden_size hidden_dim = int(config.mlp_multiplier * embed_dim) self.c_fc = JukeboxConv1D(embed_dim, hidden_dim) self.c_proj = JukeboxConv1D(hidden_dim, embed_dim) self.act = ACT2FN[config.act_fn] self.dropout = nn.Dropout(config.resid_dropout) def forward(self, hidden_states): hidden_states = self.c_fc(hidden_states) hidden_states = self.act(hidden_states) hidden_states = self.c_proj(hidden_states) hidden_states = self.dropout(hidden_states) return hidden_states class JukeboxLayerNorm(FusedLayerNorm): def __init__(self, normalized_shape, eps=1e-5, elementwise_affine=True): super().__init__(normalized_shape, eps=eps, elementwise_affine=elementwise_affine) self.width = np.prod(normalized_shape) self.max_numel = 65535 * self.width def forward(self, input): if input.numel() > self.max_numel: return F.layer_norm(input, self.normalized_shape, self.weight, self.bias, self.eps).type_as(input) else: return super().forward(input).type_as(input) class JukeboxAttention(nn.Module): def __init__(self, config, n_ctx, attn_func="dense_attn"): super().__init__() self.embed_dim = config.hidden_size self.n_heads = config.n_heads self.dropout = config.attn_dropout hidden_dim = int(config.attention_multiplier * self.embed_dim) self.head_dim = hidden_dim // config.n_heads self.n_ctx = n_ctx self.hidden_dim = hidden_dim self.scale = self.head_dim**-0.25 self.mask = config.mask if attn_func == "cross_attention": self.c_attn = JukeboxConv1D(self.embed_dim, hidden_dim) self.c_enc_kv = JukeboxConv1D(self.embed_dim, hidden_dim * 2) else: self.c_attn = JukeboxConv1D(self.embed_dim, hidden_dim * 3) self.c_proj = JukeboxConv1D(hidden_dim, self.embed_dim) self.attn_dropout = nn.Dropout(config.attn_dropout) self.resid_dropout = nn.Dropout(config.resid_dropout) # Sequence of length seq_len is factored as [blocks, seq_len // blocks] self.attn_func = attn_func if attn_func == "cross_attention": self.qkv = self.decode_qkv 
        elif attn_func == "prime_attn":
            self.qkv = self.prime_qkv
        else:
            self.qkv = self.factored_qkv

        # Maps each attn_func name to (attention implementation, mask kind).
        ATTENTION_MAP = {
            "dense_attn": (self.dense_attn, "autoregressive"),
            "block_attn": (self.block_attn, "autoregressive"),
            "transpose_block_attn": (self.transpose_block_attn, "autoregressive"),
            "prev_block_attn": (self.prev_block_attn, None),
            "summary_attn": (self.summary_attn, "summary"),
            "summary_spread_attn": (self.summary_spread_attn, "summary"),
            "cross_attention": (self.dense_attn, None),
            "prime_attn": (self.prime_attn, "prime"),
        }
        self.attn, self.attn_mask = ATTENTION_MAP[attn_func]

        self.blocks = config.blocks
        self.spread = config.spread
        if self.blocks is not None:
            self.block_ctx = self.n_ctx // self.blocks

        # Incremental-decoding state: sample_t counts generated positions and
        # cache holds the accumulated key/value tensors.
        self.sample_t = 0
        self.cache = {}
        self.encoder_len = config.nb_relevant_lyric_tokens  # length of the encoder input ids
        self.record_attn = False

    def _attn(self, query_states, key_states, value_states, sample):
        scale = self.scale
        if self.training:
            # Apply scale to q and k separately (scale**2 == head_dim**-0.5 overall).
            attention_weight = torch.matmul(query_states * scale, key_states * scale)
        else:
            # At eval, scale once after the matmul (in-place to save memory).
            attention_weight = torch.matmul(query_states, key_states)
            attention_weight.mul_(scale * scale)
        attn_weight_type = attention_weight.dtype
        attention_weight = attention_weight.float()
        if self.mask:
            # Generate appropriate mask to mask out all positions before current
            # Might take up lot of memory for dense, so can cache it
            mask = get_mask(
                self.attn_mask,
                query_states.size(-2),
                key_states.size(-1),
                self.blocks,
                self.spread,
                attention_weight.device,
                sample,
                self.sample_t,
            )
            if mask is not None:
                # Masked positions get a large negative logit before the softmax.
                attention_weight = attention_weight * mask + -1e9 * (1 - mask)
        attention_prob = F.softmax(attention_weight, dim=-1).type(attn_weight_type)
        if self.record_attn:
            self.attention_prob = attention_prob
            if self.attn_func == "prime_attn":
                # only keep music queries and lyrics keys/values
                self.attention_prob = self.attention_prob[:, :, self.encoder_len :, : self.encoder_len]
        attention_prob = self.attn_dropout(attention_prob)
        context_states = torch.matmul(attention_prob, value_states)
        return context_states

    def merge_heads(self, hidden_states):
        # (batch, heads, seq, head_dim) -> (batch, seq, heads * head_dim)
        hidden_states = hidden_states.permute(0, 2, 1, 3).contiguous()
        new_hidden_states_shape = (*hidden_states.size()[:-2], hidden_states.size(-2) * hidden_states.size(-1))
        return hidden_states.view(*new_hidden_states_shape)  # in Tensorflow implem: fct merge_states

    def split_heads(self, hidden_states, is_key=False):
        # (batch, seq, hidden) -> (batch, heads, seq, head_dim); keys are further
        # transposed so the matmul in _attn needs no extra transpose.
        new_hidden_states_shape = (
            *hidden_states.size()[:-1],
            self.n_heads,
            hidden_states.size(-1) // self.n_heads,
        )
        hidden_states = hidden_states.view(*new_hidden_states_shape)  # in Tensorflow implem: fct split_states
        if is_key:
            return hidden_states.permute(0, 2, 3, 1)
        else:
            return hidden_states.permute(0, 2, 1, 3)

    def dense_attn(self, query, key, value, sample):
        # Full (unfactored) attention over the whole sequence.
        query = self.split_heads(query)
        key = self.split_heads(key, is_key=True)
        value = self.split_heads(value)
        context_states = self._attn(query, key, value, sample)
        context_states = self.merge_heads(context_states)
        return context_states

    def block_attn(self, query, key, value, sample):
        # Attention restricted to independent contiguous blocks of block_ctx tokens.
        block_ctx = self.block_ctx
        batch_size, seq_len, embed_dim = value.shape  # For sample, query_len= 1, key_len = value_len = sample_t
        if sample:
            return self.dense_attn(query, key, value, sample).view(batch_size, 1, embed_dim)
        else:
            query_length = query.shape[1]
            query = query.view(batch_size * query_length // block_ctx, block_ctx, embed_dim)
            if query_length < seq_len:
                # Align keys/values with the (shorter) query window.
                seq_len = query_length
                key = key[:, -seq_len:].contiguous()
                value = value[:, -seq_len:].contiguous()
            key = key.view(batch_size * seq_len // block_ctx, block_ctx, embed_dim)
            value = value.view(batch_size * seq_len // block_ctx, block_ctx, embed_dim)
            return self.dense_attn(query, key, value, sample).view(batch_size, seq_len, embed_dim)

    def transpose_block_attn(self, query, key, value, sample):
        # Attention across blocks: position i of each block attends to position i
        # of the other blocks (blocks and positions are swapped via transpose).
        block_ctx = self.block_ctx
        batch_size, seq_len, embed_dim = value.shape  # For sample, query_len= 1, key_len = value_len = sample_t
        if sample:
            # Keep only the positions with the same intra-block offset as the
            # current sampling position.
            block_len = (seq_len - 1) % block_ctx
            key = key[:, block_len::block_ctx, :]
            value = value[:, block_len::block_ctx, :]
            return self.dense_attn(query, key, value, sample).view(batch_size, 1, embed_dim)
        else:
            query_length = query.shape[1]
            query = query.view(batch_size, query_length // block_ctx, block_ctx, embed_dim)
            query = query.transpose(1, 2).contiguous()
            query = query.view(batch_size * block_ctx, query_length // block_ctx, embed_dim)

            key = key.view(batch_size, seq_len // block_ctx, block_ctx, embed_dim)
            key = key.transpose(1, 2).contiguous()
            key = key.view(batch_size * block_ctx, seq_len // block_ctx, embed_dim)

            value = value.view(batch_size, seq_len // block_ctx, block_ctx, embed_dim)
            value = value.transpose(1, 2).contiguous()
            value = value.view(batch_size * block_ctx, seq_len // block_ctx, embed_dim)

            block_attn = self.dense_attn(query, key, value, sample)
            # Undo the block/position transpose.
            block_attn = block_attn.view(batch_size, block_ctx, query_length // block_ctx, embed_dim)
            block_attn = block_attn.transpose(1, 2).contiguous()
            block_attn = block_attn.view(batch_size, query_length, embed_dim)

            return block_attn

    def prev_block_attn(self, query, key, value, sample):
        # Each block attends to the previous block (the first block attends to zeros).
        block_ctx = self.block_ctx
        batch_size, seq_len, embed_dim = value.shape  # For sample, query_len= 1, key_len = value_len = sample_t
        if sample:
            block = (seq_len - 1) // block_ctx
            prev_l = (block - 1) * block_ctx
            if block > 0:
                key = key[:, prev_l : prev_l + block_ctx, :]
                value = value[:, prev_l : prev_l + block_ctx, :]
            else:
                # No previous block yet: attend to zeros.
                key = torch.zeros(batch_size, block_ctx, embed_dim, device=query.device, dtype=query.dtype)
                value = torch.zeros(batch_size, block_ctx, embed_dim, device=query.device, dtype=query.dtype)
            return self.dense_attn(query, key, value, sample).view(batch_size, 1, embed_dim)
        else:
            query_length = query.shape[1]
            query = query.view(batch_size * query_length // block_ctx, block_ctx, embed_dim)

            # Shift key/value one block to the right (drop last block, pad a zero
            # block at the front) so block i sees block i-1.
            key = key.view(batch_size, seq_len // block_ctx, block_ctx, embed_dim)[:, :-1, :, :]
            key = torch.nn.functional.pad(key, (0, 0, 0, 0, 1, 0))
            key = key.view(batch_size * seq_len // block_ctx, block_ctx, embed_dim)

            value = value.view(batch_size, seq_len // block_ctx, block_ctx, embed_dim)[:, :-1, :, :]
            value = torch.nn.functional.pad(value, (0, 0, 0, 0, 1, 0))
            value = value.view(batch_size * seq_len // block_ctx, block_ctx, embed_dim)

            if query_length < seq_len:
                # Keep only the key/value blocks matching the trailing query blocks.
                nb_query_blocks = query_length // block_ctx
                nb_key_blocks = seq_len // block_ctx
                seq_len = query_length
                key = key.view(batch_size, nb_key_blocks, block_ctx, embed_dim)[:, -nb_query_blocks:]
                key = key.contiguous().view(batch_size * nb_query_blocks, block_ctx, embed_dim)

                value = value.view(batch_size, nb_key_blocks, block_ctx, embed_dim)[:, -nb_query_blocks:]
                value = value.contiguous().view(batch_size * nb_query_blocks, block_ctx, embed_dim)

            return self.dense_attn(query, key, value, sample).view(batch_size, seq_len, embed_dim)

    def summary_attn(self, query, key, value, sample):
        # Attends to the last position of every previous block (one "summary"
        # key/value per block, shifted right with a zero pad).
        blocks = self.blocks
        block_ctx = self.block_ctx
        batch_size, seq_len, embed_dim = value.shape  # For sample, query_len= 1, key_len = value_len = sample_t
        if sample:
            key = key[:, block_ctx - 1 : blocks * block_ctx - 1 : block_ctx, :]
            key = torch.nn.functional.pad(key, (0, 0, 1, 0))

            value = value[:, block_ctx - 1 : blocks * block_ctx - 1 : block_ctx, :]
            value = torch.nn.functional.pad(value, (0, 0, 1, 0))
            return self.dense_attn(query, key, value, sample).view(batch_size, 1, embed_dim)
        else:
            key = key.view(batch_size, blocks, seq_len // blocks, embed_dim)[:, :-1, -1, :]
            key = torch.nn.functional.pad(key, (0, 0, 1, 0))  # batch_size, blocks, embed_dim

            value = value.view(batch_size, blocks, seq_len // blocks, embed_dim)[:, :-1, -1, :]
            value = torch.nn.functional.pad(value, (0, 0, 1, 0))  # batch_size, blocks, embed_dim
            return self.dense_attn(query, key, value, sample).view(batch_size, seq_len, embed_dim)

    def summary_spread_attn(self, query, key, value, sample):
        # Like summary_attn but keeps the last `spread` positions of each block.
        blocks = self.blocks
        spread = self.spread

        batch_size, seq_len, embed_dim = value.shape  # For sample, query_len= 1, key_len = value_len = sample_t
        if sample:
            raise NotImplementedError
        else:
            key = key.view(batch_size, blocks, seq_len // blocks, embed_dim)[:, :-1, -spread:, :]
            key = torch.nn.functional.pad(key, (0, 0, 0, 0, 1, 0)).contiguous()
            key = key.view(batch_size, blocks * spread, embed_dim)

            value = value.view(batch_size, blocks, seq_len // blocks, embed_dim)[:, :-1, -spread:, :]
            value = torch.nn.functional.pad(value, (0, 0, 0, 0, 1, 0)).contiguous()
            value = value.view(batch_size, blocks * spread, embed_dim)

            return self.dense_attn(query, key, value, sample).view(batch_size, seq_len, embed_dim)

    def prime_attn(self, query, key, value, sample):
        # Music tokens attend only to the (padded) lyric prefix of length _encoder_len.
        encoder_len = self._encoder_len
        key = key[:, :encoder_len]
        value = value[:, :encoder_len]
        return self.dense_attn(query, key, value, sample)

    def factored_qkv(self, hidden_states, last_encoder_hidden_states=None, sample=False):
        # Self-attention q/k/v split; maintains the key/value cache while sampling.
        curr_ctx = hidden_states.shape[1]
        if last_encoder_hidden_states is not None:
            raise TypeError("last_encoder_hidden_states should be None")

        query, key, value = hidden_states.chunk(3, dim=2)
        if sample:
            self.sample_t += curr_ctx
            key, value = self._append_cache(key, value)
            # Trim the cache to the minimum length this attn_func needs.
            l_cache = self._suff_cache_len()
            if self._cache_len() > l_cache:
                self._slice_cache(-l_cache)
            if curr_ctx > 1:
                # Multi-token step (priming): pad to block boundaries and run the
                # non-sampling code path.
                if self.attn_func != "dense_attn":
                    query = self._pad_to_block_ctx(query, query=True)
                    key = self._pad_to_block_ctx(key)
                    value = self._pad_to_block_ctx(value)
                sample = False
            else:
                key = self.cache["key"]
                value = self.cache["value"]
        return query, key, value, sample

    def prime_qkv(self, hidden_states, last_encoder_hidden_states=None, sample=False):
        # q/k/v for prime attention: the cache only ever holds the lyric prefix.
        curr_ctx = hidden_states.shape[1]
        if last_encoder_hidden_states is not None:
            raise TypeError("last_encoder_hidden_states should be None")
        query, key, value = hidden_states.chunk(3, dim=2)
        if sample:
            if self._cache_len() < self._encoder_len:
                self._append_cache(key, value)
            if self._cache_len() > self._encoder_len:
                self._slice_cache(0, self._encoder_len)
            key, value = self.cache["key"], self.cache["value"]
            self.sample_t += curr_ctx
        return query, key, value, sample

    def decode_qkv(self, hidden_states, last_encoder_hidden_states=None, sample=False):
        # Cross-attention: queries from the decoder stream, keys/values projected
        # from the encoder hidden states (cached once at sample_t == 0).
        curr_ctx = hidden_states.shape[1]
        query = hidden_states
        if sample:
            if self.sample_t == 0:
                self.cache["key"], self.cache["value"] = self.c_enc_kv(
                    last_encoder_hidden_states.type_as(hidden_states)
                ).chunk(2, dim=2)
            key, value = self.cache["key"], self.cache["value"]
            self.sample_t += curr_ctx
        else:
            key, value = self.c_enc_kv(last_encoder_hidden_states.type_as(hidden_states)).chunk(2, dim=2)
        return query, key, value, sample

    def forward(self, hidden_states, last_encoder_hidden_states=None, sample=False):
        curr_ctx = hidden_states.shape[1]
        hidden_states = self.c_attn(hidden_states)
        query, key, value, sample = self.qkv(
            hidden_states, last_encoder_hidden_states=last_encoder_hidden_states, sample=sample
        )
        attention_scores = self.attn(query, key, value, sample)
        if attention_scores.shape[1] != curr_ctx:
            # Remove the block padding added by _pad_to_block_ctx.
            offset = self._offset(curr_ctx)
            attention_scores = attention_scores[:, offset : offset + curr_ctx, :].contiguous()
        attention_scores = self.c_proj(attention_scores)
        return self.resid_dropout(attention_scores)

    @property
    def _encoder_len(self):
        # Encoder length rounded up to a whole number of blocks.
        encoder_len = self.encoder_len
        encoder_blocks = (encoder_len // self.blocks) + 1
        return encoder_blocks * self.blocks

    def _offset(self, curr_ctx):
        # Position of the current context within its block (0 for dense attention).
        if self.attn_func == "dense_attn":
            return 0
        return (self.sample_t - curr_ctx) % self.block_ctx

    def _pad_to_block_ctx(self, hidden_states, query=False):
        # Left-pad by the intra-block offset (queries only) and right-pad so the
        # sequence length is a multiple of block_ctx.
        seq_len = hidden_states.shape[1]
        offset = self._offset(seq_len) if query else 0
        n_blocks = (seq_len + offset + self.block_ctx - 1) // self.block_ctx
        pad = n_blocks * self.block_ctx - seq_len - offset
        if pad == 0 and offset == 0:
            return hidden_states
        else:
            return F.pad(hidden_states, (0, 0, offset, pad))

    def _cache_len(self):
        return 0 if "key" not in self.cache else self.cache["key"].shape[1]

    def _suff_cache_len(self):
        """
        Precondition: key and value are appended with the current context and self.sample_t reflects the 1-indexed sample location in the
context.
        """
        previous_block_length = (self.sample_t - 1) % self.block_ctx + 1 + self.block_ctx
        # Minimum cache length each attention variant needs to keep producing
        # correct outputs while sampling.
        REQUIRED_CACHE_LEN = {
            "dense_attn": self.sample_t,
            "block_attn": (self.sample_t - 1) % self.block_ctx + 1,
            "transpose_block_attn": self.sample_t,
            "prev_block_attn": self.sample_t if self.sample_t <= self.block_ctx else previous_block_length,
            "cross_attn": self.encoder_len,
            "prime_attn": min(self.sample_t, self._encoder_len),
        }
        return REQUIRED_CACHE_LEN[self.attn_func]

    def _slice_cache(self, start, end=None):
        # Keep only cache positions [start:end] along the sequence dimension.
        self.cache["key"] = self.cache["key"][:, start:end]
        self.cache["value"] = self.cache["value"][:, start:end]

    def _append_cache(self, key, value):
        # Append the new key/value states to the sampling cache and return the
        # full cached tensors.
        if "key" not in self.cache:
            self.cache["key"] = key
            self.cache["value"] = value
        else:
            old_key, old_value = key, value
            key = torch.cat([self.cache["key"], old_key], dim=1)
            value = torch.cat([self.cache["value"], old_value], dim=1)
            # Drop references eagerly to release memory before re-assigning.
            del self.cache["key"]
            del self.cache["value"]
            del old_key
            del old_value
            self.cache["key"] = key
            self.cache["value"] = value
        return self.cache["key"], self.cache["value"]

    def del_cache(self):
        # Reset all sampling state.
        self.sample_t = 0
        if "key" in self.cache:
            del self.cache["key"]
        if "value" in self.cache:
            del self.cache["value"]
        self.cache = {}


class JukeboxBlock(nn.Module):
    def __init__(self, config, n_ctx, attn_func="dense_attn"):
        super().__init__()
        self.width = config.hidden_size
        self.attn = JukeboxAttention(config, n_ctx, attn_func=attn_func)

        self.layer_norm_0 = JukeboxLayerNorm(config.hidden_size)
        self.mlp = JukeboxMLP(config)
        self.layer_norm_1 = JukeboxLayerNorm(config.hidden_size)
        # Residual branches are rescaled by 1/num_layers when attn_res_scale is set.
        self.res_scale = 1.0 / config.num_layers if config.attn_res_scale else 1.0
        self.attn_func = attn_func

    def forward(self, hidden_states, last_encoder_hidden_states, sample=False):
        # Pre-norm transformer block: attn then MLP, both added to the residual.
        residuals = hidden_states
        hidden_states = self.layer_norm_0(hidden_states)
        hidden_states = self.attn(hidden_states, last_encoder_hidden_states, sample)

        output_states = self.layer_norm_1(residuals + hidden_states)
        output_states = self.mlp(output_states)
        if
self.res_scale == 1.0:
            output = residuals + hidden_states + output_states
        else:
            output = residuals + self.res_scale * (hidden_states + output_states)
        return output


class JukeboxLayerStack(nn.Module):
    def __init__(self, config, n_ctx):
        super().__init__()
        self.n_ctx = n_ctx
        self.width = config.hidden_size
        self.num_layers = config.num_layers
        self.blocks = config.blocks
        self.attention_pattern = config.attention_pattern
        if self.blocks is not None:
            self.block_ctx = n_ctx // self.blocks
        self.encoder_len = config.nb_relevant_lyric_tokens
        self.n_heads = config.n_heads

        # Orders of attn_func: attention_pattern maps a layer depth to the
        # attn_func name used by that layer.
        attention_pattern = ATTENTION_PATTERNS[self.attention_pattern]
        self._attn_mods = nn.ModuleList()
        for depth in range(self.num_layers):
            self._attn_mods.append(JukeboxBlock(config, n_ctx, attn_func=attention_pattern(depth)))

        self.saved_attn_weights = []

    def set_record_attn(self, record_attn):
        """
        Makes forward prop dump self-attention softmaxes to self.saved_attn_weights.

        Args:
            record_attn (`Union[bool,set]`):
                Either a set of layer indices indicating which layers to store, or a boolean value indicating Whether to
                dump all.
""" def _should_record_attn(layer_idx): if isinstance(record_attn, bool): return record_attn return layer_idx in record_attn for i, layer in enumerate(self._attn_mods): layer.attn.record_attn = _should_record_attn(i) if not record_attn: self.saved_attn_weights = [] def forward(self, hidden_states, last_encoder_hidden_states=None, sample=False): # Blocks for i, attn_layer in enumerate(self._attn_mods): if attn_layer.attn_func == "cross_attention": # attend to the lyrics hidden_states = attn_layer( hidden_states, last_encoder_hidden_states=last_encoder_hidden_states, sample=sample ) else: hidden_states = attn_layer(hidden_states, last_encoder_hidden_states=None, sample=sample) if attn_layer.attn.record_attn: self.saved_attn_weights.append(attn_layer.attn.c_attn.weight) return hidden_states def del_cache(self): for attn_layer in self._attn_mods: attn_layer.attn.del_cache() class JukeboxPositionalEmbedding(nn.Module): def __init__(self, embed_dim, width): super().__init__() self.pos_emb = nn.Parameter(torch.empty((embed_dim, width))) def forward(self): pos_emb = self.pos_emb return pos_emb class JukeboxConditionalAutoregressive(nn.Module): def __init__( self, config, n_ctx=None, embed_dim=None, audio_conditioning=False, metadata_conditioning=False, is_encoder=False, ): """ Autoregressive model on either lyric tokens or music tokens, or both. The attention pattern should be properly set fro each configuration. Args: config (`JukeboxPriorConfig`): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. n_ctx (`int`, *optional*): Number of tokens or lyrics tokens provided in a single pass. 
            embed_dim (`int`, *optional*):
                Either equals to the dimension of the codebook, or the sum of n_vocab (lyrics) and codebook dimension,
                if the model combines lyrics and music tokens, or simply n_vocab if the model is a separate encoder
            audio_conditioning (`bool`, *optional*, defaults to `False`):
                Whether or not the prior supports conditioning on audio.
            metadata_conditioning (`bool`, *optional*, defaults to `False`):
                Whether or not the prior supports conditioning on artist, genres, lyrics and timing.
            is_encoder (`bool`, *optional*, defaults to `False`):
                Whether the model is an encoder only model.
        """
        super().__init__()
        self.width = config.hidden_size
        self.num_layers = config.num_layers
        self.n_ctx = n_ctx if n_ctx is not None else config.n_ctx
        self.embed_dim = embed_dim if embed_dim is not None else config.music_vocab_size
        self.embed_tokens = nn.Embedding(self.embed_dim, config.hidden_size)
        self.embed_tokens_dropout = nn.Dropout(config.emb_dropout)
        self.metadata_conditioning = metadata_conditioning
        self.audio_conditioning = audio_conditioning
        if not metadata_conditioning:
            # Without metadata conditioning a learned start token fills position 0.
            self.start_token = nn.Parameter(torch.empty((1, config.hidden_size)))
        self.pos_emb = JukeboxPositionalEmbedding(self.n_ctx, config.hidden_size)
        self.pos_emb_dropout = nn.Dropout(config.emb_dropout)

        self.transformer = JukeboxLayerStack(config, n_ctx=self.n_ctx)
        self.is_encoder = is_encoder
        self.encoder_len = config.nb_relevant_lyric_tokens

        if config.merged_decoder:
            # Merged piped model uses this setup
            self.add_cond_after_transformer = False
            self.share_embed_tokens_fc_proj_out = False
        else:
            self.add_cond_after_transformer = True
            self.share_embed_tokens_fc_proj_out = True

        if not is_encoder:
            self.fc_proj_out = nn.Linear(config.hidden_size, self.embed_dim, bias=False)
            if self.share_embed_tokens_fc_proj_out:
                # Weight tying between the input embedding and the output projection.
                self.fc_proj_out.weight = self.embed_tokens.weight
            self.loss = torch.nn.CrossEntropyLoss()

    def forward(
        self,
        tokens,
        audio_conditioning=None,
        metadata_conditioning=None,
        last_encoder_hidden_states=None,
        get_preds=False,
        get_acts=False,
        get_sep_loss=False,
    ):
        """
        Args:
            tokens (`torch.tensor`):
                Can represent music tokens, lyrics tokens or both, depending on the configuration.
        """
        # Preprocess.
        batch_size = tokens.shape[0]
        with torch.no_grad():
            tokens = tokens.view(batch_size, -1).long()

        if not self.audio_conditioning:
            # Zero conditioning placeholder matching the MLP weight dtype.
            audio_conditioning = torch.zeros(
                (batch_size, 1, self.width),
                device=tokens.device,
                dtype=self.transformer._attn_mods[0].mlp.c_fc.weight.dtype,
            )

        target = tokens  # Target
        hidden_states = self.embed_tokens(tokens)
        # Shift by 1, and fill in start token
        hidden_states = torch.cat((hidden_states[:, -1:], hidden_states[:, :-1]), dim=1)
        if self.metadata_conditioning:
            hidden_states[:, 0] = metadata_conditioning.view(batch_size, self.width)
        else:
            hidden_states[:, 0] = self.start_token

        hidden_states = (
            self.embed_tokens_dropout(hidden_states) + self.pos_emb_dropout(self.pos_emb()) + audio_conditioning
        )  # Pos emb and dropout

        hidden_states = self.transformer(
            hidden_states, last_encoder_hidden_states=last_encoder_hidden_states
        )  # Transformer
        if self.add_cond_after_transformer:  # Piped doesn't add x_cond
            hidden_states = hidden_states + audio_conditioning

        activations = hidden_states
        if self.is_encoder:
            return hidden_states

        hidden_states = self.fc_proj_out(hidden_states)  # Predictions
        loss_fn = nn.CrossEntropyLoss()
        if get_sep_loss:
            # Separate losses over the lyric prefix and the music-token suffix.
            lyric_hidden_states = hidden_states[:, : self.encoder_len].reshape(-1, self.embed_dim)
            token_hidden_states = hidden_states[:, self.encoder_len :].reshape(-1, self.embed_dim)

            # Divide by log(2) to express the losses in bits per dimension.
            lyric_loss = loss_fn(lyric_hidden_states, target[:, : self.encoder_len].reshape(-1)) / np.log(2.0)
            music_token_loss = loss_fn(token_hidden_states, target[:, self.encoder_len :].reshape(-1)) / np.log(2.0)

            loss = (lyric_loss, music_token_loss)  # Note order! Lyric is first
        else:
            loss = loss_fn(hidden_states.view(-1, self.embed_dim), target.view(-1)) / np.log(2.0)  # Loss

        if get_preds:
            return loss, hidden_states
        elif get_acts:
            return loss, activations
        else:
            return loss, None

    def get_emb(self, sample_t, n_samples, tokens, audio_conditioning, metadata_conditioning):
        # Builds the input embedding for sampling step sample_t.
        if sample_t == 0:
            # First step: no previous token, use the metadata projection or the
            # learned start token at position 0.
            hidden_states = torch.empty(n_samples, 1, self.width, dtype=self.embed_tokens.weight.dtype).to(
                self.embed_tokens.weight.device
            )
            if self.metadata_conditioning:
                hidden_states[:, 0] = metadata_conditioning.view(n_samples, self.width)
            else:
                hidden_states[:, 0] = self.start_token
        else:
            hidden_states = self.embed_tokens(tokens)
        if audio_conditioning.shape == (n_samples, self.n_ctx, self.width):
            # Per-position conditioning: pick the slice for this step.
            cond = audio_conditioning[:, sample_t : sample_t + 1, :]
        else:
            cond = audio_conditioning
        # Pos emb, dropout is identity at eval time
        hidden_states = hidden_states + self.pos_emb()[sample_t : sample_t + 1] + cond
        return hidden_states, cond

    def sample(
        self,
        n_samples,
        audio_conditioning=None,
        metadata_conditioning=None,
        last_encoder_hidden_states=None,
        temp=1.0,
        top_k=0,
        top_p=0.0,
        get_preds=False,
        sample_tokens=None,
    ):
        # Ancestral sampling: generate sample_tokens tokens one at a time.
        if sample_tokens is None:
            sample_tokens = self.n_ctx

        if not self.audio_conditioning:
            audio_conditioning = torch.zeros(
                (n_samples, 1, self.width), dtype=self.transformer._attn_mods[0].mlp.c_fc.weight.dtype
            ).to(self.fc_proj_out.device)

        with torch.no_grad():
            sampled_tokens = []
            tokens = None
            if get_preds:
                preds = []

            # NOTE(review): `iter` shadows the builtin; kept for behavior parity.
            iter = tqdm(range(0, sample_tokens), leave=False)
            for sample_t in iter:
                iter.set_description(f"Ancestral sampling {sample_tokens} music tokens", refresh=True)
                hidden_states, cond = self.get_emb(
                    sample_t, n_samples, tokens, audio_conditioning, metadata_conditioning
                )

                hidden_states = self.transformer(
                    hidden_states, last_encoder_hidden_states=last_encoder_hidden_states, sample=True
                )
                if self.add_cond_after_transformer:
                    hidden_states = hidden_states + cond
                hidden_states = self.fc_proj_out(hidden_states)  # Predictions
                if get_preds:
                    preds.append(hidden_states.clone())
                # Adjust logits
                hidden_states = hidden_states / temp
                hidden_states = filter_logits(hidden_states, top_k=top_k, top_p=top_p)
                # Sample and replace hidden_states
                tokens = torch.distributions.Categorical(logits=hidden_states).sample()
                sampled_tokens.append(tokens.clone())

            del tokens
            self.transformer.del_cache()

            tokens = torch.cat(sampled_tokens, dim=1)
            if get_preds:
                preds = torch.cat(preds, dim=1)
        if get_preds:
            return tokens, preds
        else:
            return tokens

    def split_chunks(self, length, chunk_size):
        # Split `length` into chunk_size-sized pieces; the last piece holds the
        # remainder ((length - 1) % chunk_size + 1, never 0).
        n_passes = (length + chunk_size - 1) // chunk_size
        chunk_sizes = [*[chunk_size] * (n_passes - 1), (length - 1) % chunk_size + 1]
        return chunk_sizes

    def primed_sample(
        self,
        n_samples,
        lyric_and_music_tokens,
        audio_conditioning=None,
        metadata_conditioning=None,
        last_encoder_hidden_states=None,
        temp=1.0,
        top_k=0,
        top_p=0.0,
        get_preds=False,
        chunk_size=None,
        sample_tokens=None,
    ):
        # Sampling conditioned on a prefix of already-known tokens: first prime the
        # key/value caches with the prefix, then continue ancestral sampling.
        if sample_tokens is None:
            sample_tokens = self.n_ctx
        # Preprocess.
        batch_size = lyric_and_music_tokens.shape[0]
        with torch.no_grad():
            lyric_and_music_tokens = lyric_and_music_tokens.view(batch_size, -1).long()

        sampled_audio = torch.split(lyric_and_music_tokens, 1, dim=1)
        sampled_audio = list(sampled_audio)

        if not self.audio_conditioning:
            audio_conditioning = torch.zeros(
                (n_samples, 1, self.width), dtype=self.transformer._attn_mods[0].mlp.c_fc.weight.dtype
            ).to(lyric_and_music_tokens.device)

        with torch.no_grad():
            if get_preds:
                preds = []

            # Fill up key/value cache for past context by running forward pass.
            # We do so in chunks instead of doing the whole past in one forward pass to reduce max memory usage.
            if chunk_size is None:
                chunk_size = len(sampled_audio)
            chunk_sizes = self.split_chunks(len(sampled_audio), chunk_size)
            x_primes = []
            start = 0
            token = None

            for current_chunk_size in tqdm(chunk_sizes, desc="Preparing past key value", leave=False):
                sampled_audio_prime, conds_prime = [], []
                for sample_t in range(start, start + current_chunk_size):
                    x_prime, cond_prime = self.get_emb(
                        sample_t, n_samples, token, audio_conditioning, metadata_conditioning
                    )
                    token = sampled_audio[sample_t]
                    sampled_audio_prime.append(x_prime)
                    conds_prime.append(cond_prime)
                start = start + current_chunk_size

                x_prime, cond_prime = torch.cat(sampled_audio_prime, dim=1), torch.cat(conds_prime, dim=1)
                del sampled_audio_prime
                del conds_prime
                if not get_preds:
                    del cond_prime
                x_prime = self.transformer(x_prime, last_encoder_hidden_states=last_encoder_hidden_states, sample=True)

                if get_preds:
                    if self.add_cond_after_transformer:
                        x_prime = x_prime + cond_prime
                    del cond_prime
                    x_primes.append(x_prime)
                else:
                    del x_prime

            if get_preds:
                x_prime = torch.cat(x_primes, dim=1)
                x_prime = self.fc_proj_out(x_prime)  # Predictions
                preds.append(x_prime)

            # the input of the encoder and decoder can be merged into (lyrics, music tokens)
            input_tokens = sampled_audio[-1]

            itererator = tqdm(
                range(len(sampled_audio), sample_tokens),
                desc=f"Sampling {len(range(len(sampled_audio), sample_tokens))} music tokens",
                leave=False,
            )
            for sample_t in itererator:
                hidden_states, cond = self.get_emb(
                    sample_t, n_samples, input_tokens, audio_conditioning, metadata_conditioning
                )

                hidden_states = self.transformer(
                    hidden_states, last_encoder_hidden_states=last_encoder_hidden_states, sample=True
                )
                if self.add_cond_after_transformer:
                    hidden_states = hidden_states + cond
                hidden_states = self.fc_proj_out(hidden_states)  # Predictions
                if get_preds:
                    preds.append(hidden_states)
                # Adjust logits
                hidden_states = hidden_states / temp
                hidden_states = filter_logits(hidden_states, top_k=top_k, top_p=top_p)
                # only music tokens are sampled
                music_tokens =
torch.distributions.Categorical(logits=hidden_states).sample()
                sampled_audio.append(music_tokens.clone())
                input_tokens = music_tokens

            del input_tokens, music_tokens
            self.transformer.del_cache()

            music_tokens = torch.cat(sampled_audio, dim=1)
            if get_preds:
                preds = torch.cat(preds, dim=1)
        if get_preds:
            return music_tokens, preds
        else:
            return music_tokens


class JukeboxMusicTokenConditioner(nn.Module):
    """
    The `JukeboxMusicTokenConditioner` takes music tokens as an input (corresponding to the codes of the VQVAE's
    codebook) and upsamples it using a single layer of decoder convolution block (the same is used in the VQVAE).
    """

    def __init__(self, config, level):
        super().__init__()
        self.embed_tokens = nn.Embedding(config.music_vocab_size, config.hidden_size)
        config.embed_dim = config.music_vocab_size  # setting correct argument for the `JukeboxDecoder`

        self.upsampler = JukeboxDecoderConvBock(
            config,
            config.hidden_size,
            config.res_conv_width,
            config.res_conv_depth,
            config.res_downs_t[level],
            config.res_strides_t[level],
            reverse_dilation=False,
        )
        self.layer_norm = JukeboxLayerNorm(config.hidden_size)

    def forward(self, music_tokens, raw_audio_conditionning=None):
        """
        Args:
            music_tokens (`torch.LongTensor`):
                Music tokens from the upper level in range(nb_discrete_codes)
            raw_audio_conditionning (`torch.LongTensor`, *optional*):
                Audio used when primed sampling, raw audio information that conditions the generation
        """
        if raw_audio_conditionning is None:
            raw_audio_conditionning = 0.0
        # Embed music_tokens
        music_tokens = music_tokens.long()
        hidden_states = self.embed_tokens(music_tokens)
        hidden_states = hidden_states + raw_audio_conditionning

        # Run conditioner (channels-first for the conv stack, then back).
        hidden_states = hidden_states.permute(0, 2, 1)
        hidden_states = self.upsampler(hidden_states)
        hidden_states = hidden_states.permute(0, 2, 1)
        hidden_states = self.layer_norm(hidden_states)
        return hidden_states


class JukeboxRangeEmbedding(nn.Module):
    """
    The `JukeboxRangeEmbedding` interpolate the given [pos_start, pos_end] to obtain
an equivalent of time positional embedding of length `n_ctx`. Binning process : For each pos in position tensor, find its bin [start,end) mapped to [0,1,...,bins-1] [start,end) -> [0,1) -> [0, bins) -> floor -> [0,...,bins-1] NOTE: Open ended interval on right, so start <= pos < end, not <= end """ def __init__(self, n_time, embed_dim, range, out_width, clamp=False): super().__init__() self.n_time = n_time self.embed_dim = embed_dim self.emb = nn.Embedding(embed_dim, out_width) self.pos_min, self.pos_max = range self.clamp = clamp def forward(self, pos_start, pos_end=None): # Check if [pos_start,pos_end] in [pos_min, pos_max) if not len(pos_start.shape) == 2: raise TypeError(f"Expected shape with 2 dims, got {pos_start.shape}") if not (self.pos_min <= pos_start).all() and (pos_start < self.pos_max).all(): raise TypeError(f"Range is [{self.pos_min},{self.pos_max}), got {pos_start}") pos_start = pos_start.float() if pos_end is not None: if self.clamp: pos_end = pos_end.clamp(self.pos_min, self.pos_max) pos_end = pos_end.float() # Interpolate so that [pos_start, ..., pos_end] <-> position tensor of length n_ctx n_time = self.n_time if n_time != 1: interpolation = ( torch.arange(0, n_time, dtype=torch.float, device=pos_start.device).view(1, n_time) / n_time ) position = pos_start + (pos_end - pos_start) * interpolation else: position = pos_start # Bin each value to bins_ # [0,1) -> [0,1..,embed_dim) -> [0,1...,embed_dim-1 normalised_position = (position - self.pos_min) / (self.pos_max - self.pos_min) bins_ = (self.embed_dim * normalised_position).floor().long().detach() return self.emb(bins_) class JukeboxLabelConditioner(nn.Module): def __init__(self, config, include_time_signal): super().__init__() embed_dim = config.hidden_size timing_dims = config.timing_dims sampling_rate = config.sampling_rate nb_genres, nb_artists = config.metadata_dims music_tokens_shape = config.n_ctx self.max_nb_genres = config.max_nb_genres self.bow_genre_emb = nn.Embedding(nb_genres, 
embed_dim)
        self.artist_emb = nn.Embedding(nb_artists, embed_dim)
        self.include_time_signal = include_time_signal
        if self.include_time_signal:
            # Duration-derived embeddings: absolute positions are measured in raw
            # audio samples, relative positions in [0, 1].
            total_length_range = (config.min_duration * sampling_rate, config.max_duration * sampling_rate)
            absolute_pos_range = (0.0, config.max_duration * sampling_rate)
            relative_pos_range = (0.0, 1.0)
            self.total_length_emb = JukeboxRangeEmbedding(1, timing_dims, total_length_range, embed_dim)
            self.absolute_pos_emb = JukeboxRangeEmbedding(
                music_tokens_shape, timing_dims, absolute_pos_range, embed_dim
            )
            self.relative_pos_emb = JukeboxRangeEmbedding(
                music_tokens_shape, timing_dims, relative_pos_range, embed_dim, clamp=True
            )

    def forward(self, metadata):
        # metadata columns: [total_length, offset, length, artist, genres...]
        total_length = metadata[:, 0:1]
        offset = metadata[:, 1:2]
        length = metadata[:, 2:3]
        artist = metadata[:, 3:4]
        genre = metadata[:, 4:]

        # Start embedding of length 1
        artist_emb = self.artist_emb(artist)
        # Empty genre slots are denoted by -1. We mask these out.
        mask = (genre >= 0).float().unsqueeze(2)
        genre_emb = (self.bow_genre_emb(genre.clamp(0)) * mask).sum(dim=1, keepdim=True)
        start_emb = genre_emb + artist_emb

        # Pos embedding of length n_ctx
        if self.include_time_signal:
            start, end = offset, offset + length
            total_length = total_length.float()
            start = start.float()
            end = end.float()
            pos_emb = (
                self.total_length_emb(total_length)
                + self.absolute_pos_emb(start, end)
                + self.relative_pos_emb(start / total_length, end / total_length)
            )
        else:
            pos_emb = None
        return start_emb, pos_emb


class JukeboxPrior(PreTrainedModel):
    """
    The JukeboxPrior class, which is a wrapper around the various conditioning and the transformer. JukeboxPrior can be
    seen as language models trained on music. They model the next `music token` prediction task. If a (lyric) `encoder`
    is defined, it also models the `next character` prediction on the lyrics. Can be conditioned on timing, artist,
    genre, lyrics and codes from lower-levels Priors.
Args: config (`JukeboxPriorConfig`): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. level (`int`, *optional*): Current level of the Prior. Should be in range `[0,nb_priors]`. nb_priors (`int`, *optional*, defaults to 3): Total number of priors. vqvae_encoder (`Callable`, *optional*): Encoding method of the VQVAE encoder used in the forward pass of the model. Passing functions instead of the vqvae module to avoid getting the parameters. vqvae_decoder (`Callable`, *optional*): Decoding method of the VQVAE decoder used in the forward pass of the model. Passing functions instead of the vqvae module to avoid getting the parameters. """ config_class = JukeboxPriorConfig _keys_to_ignore_on_load_unexpected = ["vqvae"] def _init_weights(self, module): init_scale = self.config.init_scale if isinstance(module, nn.Embedding): module.weight.data.normal_(mean=0.0, std=0.02 * init_scale) elif isinstance(module, JukeboxConv1D): if self.config.zero_out: module.weight.data.zero_() else: module.weight.data.normal_(mean=0.0, std=0.02 * init_scale) elif isinstance(module, JukeboxPositionalEmbedding): module.pos_emb.data.normal_(mean=0.0, std=0.01 * init_scale) elif isinstance(module, JukeboxRangeEmbedding): module.emb.weight.data.normal_(mean=0.0, std=0.01 * init_scale) elif isinstance(module, JukeboxConditionalAutoregressive) and hasattr(module, "lm_head"): module.lm_head.weight.data.normal_(mean=0.0, std=0.02 * init_scale) elif isinstance(module, JukeboxConditionalAutoregressive) and hasattr(module, "start_token"): module.start_token.data.normal_(mean=0.0, std=0.01 * init_scale) elif isinstance(module, JukeboxResConv1DBlock) and self.config.zero_out: module.conv1d_2.weigth.data.zero_() module.conv1d_2.bias.data.zero_() if isinstance(module, nn.LayerNorm): 
module.bias.data.zero_() module.weight.data.fill_(1.0) if isinstance(module, nn.Linear) and module.bias is not None: module.bias.data.zero_() def __init__(self, config: JukeboxPriorConfig, level=None, nb_priors=3, vqvae_encoder=None, vqvae_decoder=None): super().__init__(config) # Passing functions instead of the vqvae module to avoid getting params, only used in the # forward loop self.vqvae_encoder = vqvae_encoder self.vqvae_decoder = vqvae_decoder self.levels = nb_priors self.level = level if level is not None else config.level self.base_model_prefix = f"priors.{self.level}" self._keys_to_ignore_on_load_unexpected += [r"priors.[^%d]." % self.level] self.n_ctx = config.n_ctx self.lyric_conditioning = config.nb_relevant_lyric_tokens > 0 self.nb_relevant_lyric_tokens = config.nb_relevant_lyric_tokens self.encoder_loss_fraction = config.encoder_loss_fraction # Audio conditioning : conditioning on music tokens (either from audio or from previous levels or both) self.audio_conditioning = self.level != 0 self.cond_level = self.level - 1 if self.audio_conditioning: self.conditioner_blocks = JukeboxMusicTokenConditioner(config, self.level) # metadata conditioning : contioning on timing, genres, and artist self.metadata_conditioning = config.metadata_conditioning if self.metadata_conditioning: self.metadata_embedding = JukeboxLabelConditioner(config, include_time_signal=not self.audio_conditioning) # define encoder-decoder or encoder and decoder self.is_encoder_decoder = config.is_encoder_decoder if config.is_encoder_decoder: # encoder-decoder transformer self.input_shapes = [config.nb_relevant_lyric_tokens, config.n_ctx] self.embed_dim_shift = [0, config.lyric_vocab_size] self.width = config.hidden_size self.nb_relevant_lyric_tokens = config.nb_relevant_lyric_tokens self.prior = JukeboxConditionalAutoregressive( config, n_ctx=config.nb_relevant_lyric_tokens + config.n_ctx, embed_dim=config.lyric_vocab_size + config.music_vocab_size, 
audio_conditioning=(self.audio_conditioning or self.metadata_conditioning), metadata_conditioning=True, ) else: # Separate encoder-decoder transformer encoder_config = config.encoder_config if self.nb_relevant_lyric_tokens != 0 and self.lyric_conditioning: self.lyric_acts_width = encoder_config.hidden_size self.encoder_width = config.hidden_size self.encoder_dim = config.lyric_vocab_size self.encoder = JukeboxConditionalAutoregressive( encoder_config, n_ctx=self.nb_relevant_lyric_tokens, embed_dim=self.encoder_dim, audio_conditioning=False, metadata_conditioning=False, is_encoder=True, ) self.encoder.proj_in = JukeboxConv1D(encoder_config.hidden_size, config.hidden_size) self.encoder.final_layer_norm = JukeboxLayerNorm(config.hidden_size) self.encoder.lm_head = nn.Linear(config.hidden_size, config.lyric_vocab_size, bias=False) else: self.nb_relevant_lyric_tokens = 0 # decoder model on the tokens self.prior = JukeboxConditionalAutoregressive( config, audio_conditioning=(self.audio_conditioning or self.metadata_conditioning), metadata_conditioning=self.metadata_conditioning, ) self.next_token_prediction_loss_dims = config.n_ctx self.total_loss_dims = self.nb_relevant_lyric_tokens + self.next_token_prediction_loss_dims self.downsamples = [stride**down for stride, down in zip(config.res_strides_t, config.res_downs_t)] self.cond_downsample = self.downsamples[self.level] if self.level != 0 else None self.raw_to_tokens = np.prod(self.downsamples[: nb_priors - self.level]) self.sample_length = self.n_ctx * self.raw_to_tokens logger.info( f"Level:{self.level}, Cond downsample:{self.cond_downsample}, Raw to tokens:{self.raw_to_tokens}, Sample" f" length:{self.sample_length}" ) def get_metadata(self, labels, start, total_length, offset, get_indices=False): metadata = labels.clone() metadata[:, 0] = total_length # Set sample_length to match this level metadata[:, 2] = int(self.sample_length) # Set offset metadata[:, 1:2] = int(offset * self.raw_to_tokens) + int(start * 
self.raw_to_tokens) # here since metadata has the full token_list, we just need to selected the ones that are relevant # Set lyric tokens metadata, indices = self.set_metadata_lyric_tokens(metadata) if get_indices: return metadata, indices else: return metadata def set_metadata_lyric_tokens(self, labels): """ Processes the full labels to only retreive the relevant lyric tokens and keep the metadata conditioning tokens. """ if self.nb_relevant_lyric_tokens > 0: tokens_list = torch.zeros( (labels.shape[0], self.nb_relevant_lyric_tokens), dtype=torch.long, device=labels.device ) indices_list = [] # whats the index of each current character in original array for idx in range(labels.shape[0]): full_tokens = labels.clone()[:, 4 + self.metadata_embedding.max_nb_genres :] total_length, offset, duration = labels[idx, 0], labels[idx, 1], labels[idx, 2] tokens, indices = get_relevant_lyric_tokens( full_tokens, self.nb_relevant_lyric_tokens, total_length, offset, duration ) tokens_list[idx, :] = tokens indices_list.append(indices) return ( torch.cat((labels[:, : 4 + self.metadata_embedding.max_nb_genres], tokens_list), dim=-1), indices_list, ) else: return labels, None def get_music_tokens_conds(self, music_tokens, start, end): """ Extracts current level's conditioning music tokens. """ if self.level != 0: music_tokens_cond = music_tokens[self.level - 1] music_tokens = music_tokens_cond[:, start // self.cond_downsample : end // self.cond_downsample] missing_cond_len = self.n_ctx // self.cond_downsample - music_tokens_cond[-1].shape[-1] if missing_cond_len > 0: init_cond = torch.zeros(1, missing_cond_len).to(music_tokens_cond.device) music_tokens_cond = torch.cat((music_tokens_cond, init_cond), dim=-1).long() music_tokens_conds = [music_tokens_cond] else: music_tokens_conds = None return music_tokens_conds def prior_preprocess(self, tokens, conds): """ Shifts the input tokens to account for the dictionary merge. 
The embed_dim_shift give by how much the music tokens should be shifted by. It is equal to `lyric_vocab_size`. """ batch_size = tokens[0].shape[0] for i in range(len(tokens)): tokens[i] = (tokens[i] + int(self.embed_dim_shift[i])).view(batch_size, -1) for i in range(len(conds)): if conds[i] is None: conds[i] = torch.zeros( (batch_size, self.input_shapes[i], self.width), dtype=tokens[0].dtype, device=tokens[0].device ) return torch.cat(tokens, dim=1), torch.cat(conds, dim=1) def prior_postprocess(self, tokens): """ Shifts back the input tokens if the model uses an encoder decoder architecture. As the embedding layer is shared, `prior_embed_dim_shift` shifts the music token ids by `lyric_vocab_size`. Only returns the music tokens. """ batch_size = tokens.shape[0] dims = (self.input_shapes[0], tokens.shape[1] - self.input_shapes[0]) tokens = list(torch.split(tokens, dims, dim=1)) # Some of the input tokens might be shifted to take into account the voccabulary fusion for i in range(len(tokens)): bins_shift = int(self.embed_dim_shift[i]) tokens[i] = (tokens[i] - bins_shift).view(batch_size, -1) tokens[i] = torch.clamp(tokens[i], min=0) # If not masking loss, model may have generated lyric/midi tokens which are now shifted <0 by bin_shift return tokens[-1] def embed_tokens(self, music_tokens_conds): """ Embeds the upper level music tokens and upsamples them to provide as audio conditioning. """ music_tokens_conds = music_tokens_conds[: self.cond_level + 1] audio_conditioning = None for music_tokens_cond, conditioner_block in reversed(list(zip(music_tokens_conds, [self.conditioner_blocks]))): audio_conditioning = conditioner_block(music_tokens_cond, audio_conditioning) return audio_conditioning def encode(self, hidden_states, start_level=None, end_level=None, bs_chunks=1): """ Encodes the hidden states (raw audio) using the VQVAE's encoder. Returns latent_states. 
""" if start_level is None: start_level = self.level if end_level is None: end_level = self.levels # Get latents with torch.no_grad(): latent_states = self.vqvae_encoder( hidden_states, start_level=start_level, end_level=end_level, bs_chunks=bs_chunks ) return latent_states def decode(self, music_tokens, start_level=None, end_level=None, bs_chunks=1): """ Usamples the sequence of codebook vectors to a raw audio. """ if start_level is None: start_level = self.level if end_level is None: end_level = self.levels with torch.no_grad(): output = self.vqvae_decoder( music_tokens, start_level=start_level, end_level=end_level, bs_chunks=bs_chunks ) return output def get_cond(self, music_tokens_conds, metadata): """ Converts the input tokens to input_embeddings. Splits the lyrics form the rest of the metadata. Lyric tokens can be None. """ if metadata is not None: n_labels = metadata.shape[1] - self.nb_relevant_lyric_tokens metadata, lyric_tokens = metadata[:, :n_labels], metadata[:, n_labels:] else: metadata, lyric_tokens = None, None metadata_conditioning, metadata_pos = ( self.metadata_embedding(metadata) if self.metadata_conditioning else (None, None) ) audio_conditioning = self.embed_tokens(music_tokens_conds) if self.audio_conditioning else metadata_pos return audio_conditioning, metadata_conditioning, lyric_tokens def sample( self, n_samples, music_tokens=None, music_tokens_conds=None, metadata=None, temp=1.0, top_k=0, top_p=0.0, chunk_size=None, sample_tokens=None, ): """ Ancestral/Prime sampling a window of tokens using the provided conditioning and metadatas. Args: n_samples (`int`): Number of samples to generate. music_tokens (`List[torch.LongTensor]`, *optional*): Previously gemerated tokens at the current level. Used as context for the generation. music_tokens_conds (`List[torch.FloatTensor]`, *optional*): Upper-level music tokens generated by the previous prior model. Is `None` if the generation is not conditionned on the upper-level tokens. 
metadata (`List[torch.LongTensor]`, *optional*): List containing the metatdata tensor with the artist, genre and the lyric tokens. temp (`float`, *optional*, defaults to 1.0): Sampling temperature. top_k (`int`, *optional*, defaults to 0): Top k probabilities used for filtering. top_p (`float`, *optional*, defaults to 0.0): Top p probabilities used for filtering. chunk_size (`int`, *optional*): Size of the chunks used to prepare the cache of the transformer. sample_tokens (`int`, *optional*): Number of tokens to sample. """ no_past_context = music_tokens is None or music_tokens.shape[1] == 0 name = {True: "Ancestral", False: "Primed"}[no_past_context] logger.info(f"{name} sampling {n_samples} samples with temp={temp}, top_k={top_k}, top_p={top_p}") with torch.no_grad(): # Currently audio_conditioning only uses immediately above layer audio_conditioning, metadata_conditioning, lyric_tokens = self.get_cond(music_tokens_conds, metadata) if self.is_encoder_decoder: if no_past_context: # the prime_sample function will be used with music_tokens set to None lyric_and_music_tokens, audio_conditioning = self.prior_preprocess( [lyric_tokens], [None, audio_conditioning] ) else: lyric_and_music_tokens, audio_conditioning = self.prior_preprocess( [lyric_tokens, music_tokens], [None, audio_conditioning] ) if sample_tokens is not None: sample_tokens += self.nb_relevant_lyric_tokens music_tokens = self.prior.primed_sample( n_samples, lyric_and_music_tokens, audio_conditioning, metadata_conditioning, temp=temp, top_k=top_k, top_p=top_p, chunk_size=chunk_size, sample_tokens=sample_tokens, ) music_tokens = self.prior_postprocess(music_tokens) else: last_encoder_hidden_states = self.get_encoder_states(lyric_tokens, sample=True) if no_past_context: music_tokens = self.prior.sample( n_samples, audio_conditioning, metadata_conditioning, last_encoder_hidden_states, temp=temp, top_k=top_k, top_p=top_p, sample_tokens=sample_tokens, ) else: music_tokens = self.prior.primed_sample( n_samples, 
music_tokens, audio_conditioning, metadata_conditioning, last_encoder_hidden_states, temp=temp, top_k=top_k, top_p=top_p, chunk_size=chunk_size, sample_tokens=sample_tokens, ) return music_tokens def get_encoder_states(self, lyric_tokens, sample=False): """ Retreive the last hidden_states of the lyric encoder that will be attended to by the decoder. Forwards through the lyric encoder. """ if self.nb_relevant_lyric_tokens != 0 and self.lyric_conditioning: if sample: self.encoder = self.encoder.to(lyric_tokens.device) lyric_acts = self.encoder(lyric_tokens, None, None, None) lyric_acts = self.encoder.proj_in(lyric_acts) last_encoder_hidden_states = self.encoder.final_layer_norm(lyric_acts) else: last_encoder_hidden_states = None return last_encoder_hidden_states def get_encoder_loss(self, last_encoder_hidden_states, target_lyrics): """ Computes the loss for the lyric encoder: next lyric token prediction. """ if self.lyric_conditioning: last_encoder_hidden_states = self.encoder.lm_head(last_encoder_hidden_states) encoder_loss = nn.functional.cross_entropy( last_encoder_hidden_states.view(-1, self.encoder_dim), target_lyrics.view(-1) ) / np.log(2.0) else: encoder_loss = torch.tensor(0.0, device=last_encoder_hidden_states.device) return encoder_loss def forward_tokens( self, music_tokens, music_tokens_conds=[], metadata=None, get_preds=False, get_attn_weights=False ): """ Applies a forward pass using the conditioning tokens. Different from the classic forward as it does not use the vqvae's encoding layers. 
""" if get_attn_weights: self.prior.transformer.set_record_attn(get_attn_weights) audio_conditioning, metadata_conditioning, lyric_tokens = self.get_cond(music_tokens_conds, metadata) if self.is_encoder_decoder: # the preprocess returns the full tokens (Lyrics and Music tokens), shifted tokens, audio_conditioning = self.prior_preprocess( [lyric_tokens, music_tokens], [None, audio_conditioning] ) (encoder_loss, next_token_prediction_loss), preds = self.prior( tokens, audio_conditioning, metadata_conditioning, get_sep_loss=True, get_preds=get_preds ) else: last_encoder_hidden_states = self.get_encoder_states(lyric_tokens) encoder_loss = self.get_encoder_loss(last_encoder_hidden_states, lyric_tokens) next_token_prediction_loss, preds = self.prior( music_tokens, audio_conditioning, metadata_conditioning, last_encoder_hidden_states, get_preds=get_preds, ) loss = self.encoder_loss_fraction * encoder_loss * self.nb_relevant_lyric_tokens / self.total_loss_dims loss += next_token_prediction_loss * self.next_token_prediction_loss_dims / self.total_loss_dims metrics = { "bpd": next_token_prediction_loss.clone().detach(), "encoder_loss": encoder_loss.clone().detach(), "next_token_prediction_loss": next_token_prediction_loss.clone().detach(), } if get_preds: metrics["preds"] = preds.clone().detach() if get_attn_weights: saved_attn_weights = self.prior.transformer.saved_attn_weights self.prior.transformer.set_record_attn(False) return saved_attn_weights else: return loss, metrics def forward( self, hidden_states: torch.Tensor, metadata: Optional[List[torch.LongTensor]], decode: Optional[bool] = False, get_preds: Optional[bool] = False, ) -> List[torch.Tensor]: """ Encode the hidden states using the `vqvae` encoder, and then predicts the next token in the `forward_tokens` function. The loss is the sum of the `encoder` loss and the `decoder` loss. 
Args: hidden_states (`torch.Tensor`): Hidden states which should be raw audio metadata (`List[torch.LongTensor]`, *optional*): List containing the metadata conditioning tensorwith the lyric and the metadata tokens. decode (`bool`, *optional*, defaults to `False`): Whether or not to decode the encoded to tokens. get_preds (`bool`, *optional*, defaults to `False`): Whether or not to return the actual predicitons of the model. """ batch_size = hidden_states.shape[0] music_tokens, *music_tokens_conds = self.encode(hidden_states, bs_chunks=batch_size) loss, metrics = self.forward_tokens( music_tokens=music_tokens, music_tokens_conds=music_tokens_conds, metadata=metadata, get_preds=get_preds, ) if decode: dequantised_states = self.decode([music_tokens, *music_tokens_conds]) else: dequantised_states = None return dequantised_states, loss, metrics class JukeboxPreTrainedModel(PreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. """ config_class = JukeboxConfig base_model_prefix = "jukebox" supports_gradient_checkpointing = False def _init_weights(self, module): if isinstance(module, JukeboxPrior) or isinstance(module, JukeboxVQVAE): module.apply(module._init_weights) def __init__(self, *inputs, **kwargs): super().__init__(*inputs, **kwargs) JUKEBOX_SAMPLING_INPUT_DOCSTRING = r""" labels (`List[torch.LongTensor]` of length `n_sample`, and shape `(self.levels, self.config.max_nb_genre + lyric_sequence_length)` : List of metadata such as `artist_id`, `genre_id` and the full list of lyric tokens which are used to condition the generation. sampling_kwargs (`Dict[Any]`): Various additional sampling arguments that are used by the `_sample` function. A detail list of the arguments can bee seen in the [`_sample`] function documentation. """ @add_start_docstrings( """The bare JUKEBOX Model used for music generation. 
4 sampling techniques are supported : `primed_sample`, `upsample`, `continue_sample` and `ancestral_sample`. It does not have a `forward` method as the training is not end to end. If you want to fine-tune the model, it is recommended to use the `JukeboxPrior` class and train each prior individually. """, JUKEBOX_START_DOCSTRING, ) class JukeboxModel(JukeboxPreTrainedModel): _no_split_modules = ["JukeboxBlock"] def __init__(self, config): super().__init__(config) vqvae_config = config.vqvae_config self.vqvae = JukeboxVQVAE(vqvae_config) self.set_shared_params(config) self.priors = nn.ModuleList( [JukeboxPrior(config.prior_configs[level], level) for level in range(config.nb_priors)] ) def set_shared_params(self, model_config): """ Initialises the parameters that are shared. This has to be done here because the list of `JukeboxPriorConfig` is nest, and is thus unreachable in the `from_dict` function """ for config in model_config.prior_configs: config.sampling_rate = model_config.sampling_rate config.timing_dims = model_config.timing_dims config.min_duration = model_config.min_duration config.max_duration = model_config.max_duration config.max_nb_genres = model_config.max_nb_genres config.metadata_conditioning = model_config.metadata_conditioning def decode(self, music_tokens, start_level=0, end_level=None, bs_chunks=1): return self.vqvae.decode(music_tokens, start_level, end_level, bs_chunks) def encode(self, input_audio, start_level=0, end_level=None, bs_chunks=1): return self.vqvae.encode(input_audio, start_level, end_level, bs_chunks) def split_batch(self, obj, n_samples, split_size): n_passes = (n_samples + split_size - 1) // split_size if isinstance(obj, torch.Tensor): return torch.split(obj, split_size, dim=0) elif isinstance(obj, list): return list(zip(*[torch.split(item, split_size, dim=0) for item in obj])) elif obj is None: return [None] * n_passes else: raise TypeError("Unknown input type") # Sample a partial window of length<n_ctx with tokens_to_sample 
new tokens on level=level def sample_partial_window( self, music_tokens, labels, offset, sampling_kwargs, level, tokens_to_sample, max_batch_size ): prior = self.priors[level] sampled_tokens = music_tokens[level] n_ctx = prior.n_ctx nb_sampled_tokens = sampled_tokens.shape[1] if nb_sampled_tokens < n_ctx - tokens_to_sample: sampling_kwargs["sample_tokens"] = nb_sampled_tokens + tokens_to_sample start = 0 else: sampling_kwargs["sample_tokens"] = n_ctx start = nb_sampled_tokens - n_ctx + tokens_to_sample return self.sample_single_window(music_tokens, labels, offset, sampling_kwargs, level, start, max_batch_size) # Sample a single window of length=n_ctx at position=start on level=level def sample_single_window(self, music_tokens, labels, offset, sampling_kwargs, level, start, max_batch_size): prior = self.priors[level] n_samples = music_tokens[0].shape[0] n_ctx = prior.n_ctx end = start + n_ctx # get music_tokens already sampled at current level previous_sampled_tokens = music_tokens[level][:, start:end] sample_tokens = sampling_kwargs.get("sample_tokens", None) if "sample_tokens" in sampling_kwargs: sample_tokens = end - start conditioning_tokens = previous_sampled_tokens.shape[1] new_tokens = sample_tokens - previous_sampled_tokens.shape[1] logger.info( f"Sampling {sample_tokens} tokens for [{start},{start+sample_tokens}]. Conditioning on" f" {conditioning_tokens} tokens" ) if new_tokens <= 0: # Nothing new to sample return music_tokens # get music_tokens_conds from level above music_tokens_conds = prior.get_music_tokens_conds(music_tokens, start, end) # if there are no levels above should return None! 
# set metadata offset, sample_length and lyrics tokens metadata = prior.get_metadata(labels, start, self.total_length, offset) music_tokens_list = self.split_batch(previous_sampled_tokens, n_samples, max_batch_size) music_tokens_conds_list = self.split_batch(music_tokens_conds, n_samples, max_batch_size) metadata_list = self.split_batch(metadata, n_samples, max_batch_size) tokens = [] iterator = tqdm(zip(music_tokens_list, music_tokens_conds_list, metadata_list), leave=False) for music_tokens_i, music_tokens_conds_i, metadata_i in iterator: name = ["Ancestral", "Primed"][music_tokens_i.shape[1] == 0] iterator.set_description( f"[prior level {level}] {name} Sampling {sample_tokens} tokens out of" f" {self.total_length//prior.raw_to_tokens}", refresh=True, ) tokens_i = prior.sample( n_samples=music_tokens_i.shape[0], music_tokens=music_tokens_i, music_tokens_conds=music_tokens_conds_i, metadata=metadata_i, **sampling_kwargs, ) tokens.append(tokens_i) sampled_tokens = torch.cat(tokens, dim=0) # Update music_tokens with new sample music_tokens_new = sampled_tokens[:, -new_tokens:] music_tokens[level] = torch.cat([music_tokens[level], music_tokens_new], dim=1) return music_tokens # Sample total_length tokens at level=level with hop_length=hop_length def sample_level( self, music_tokens, labels, offset, sampling_kwargs, level, total_length, hop_length, max_batch_size ): if total_length >= self.priors[level].n_ctx: iterator = get_starts(total_length, self.priors[level].n_ctx, hop_length) for start in iterator: music_tokens = self.sample_single_window( music_tokens, labels, offset, sampling_kwargs, level, start, max_batch_size ) else: music_tokens = self.sample_partial_window( music_tokens, labels, offset, sampling_kwargs, level, total_length, max_batch_size ) return music_tokens @torch.no_grad() def _sample( self, music_tokens, labels, sample_levels, metas=None, chunk_size=32, sampling_temperature=0.98, lower_batch_size=16, max_batch_size=16, sample_length_in_seconds=24, 
compute_alignments=False, sample_tokens=None, offset=0, save_results=True, sample_length=None, ) -> List[torch.LongTensor]: """ Core sampling function used to generate music tokens. Iterates over the provided list of levels, while saving the generated raw audio at each step. Args: music_tokens (`List[torch.LongTensor]`): A sequence of music tokens of length `self.levels` which will be used as context to continue the sampling process. Should have `self.levels` tensors, each corresponding to the generation at a certain level. labels (`List[torch.LongTensor]`): List of length `n_sample`, and shape `(self.levels, 4 + self.config.max_nb_genre + lyric_sequence_length)` metadata such as `artist_id`, `genre_id` and the full list of lyric tokens which are used to condition the generation. sample_levels (`List[int]`): List of the desired levels at which the sampling will be done. A level is equivalent to the index of the prior in the list of priors metas (`List[Any]`, *optional*): Metadatas used to generate the `labels` chunk_size (`int`, *optional*, defaults to 32): Size of a chunk of audio, used to fill up the memory in chuncks to prevent OOM erros. Bigger chunks means faster memory filling but more consumption. sampling_temperature (`float`, *optional*, defaults to 0.98): Temperature used to ajust the randomness of the sampling. lower_batch_size (`int`, *optional*, defaults to 16): Maximum batch size for the lower level priors max_batch_size (`int`, *optional*, defaults to 16): Maximum batch size for the top level priors sample_length_in_seconds (`int`, *optional*, defaults to 24): Desired length of the generation in seconds compute_alignments (`bool`, *optional*, defaults to `False`): Whether or not to compute the alignment between the lyrics and the audio using the top_prior sample_tokens (`int`, *optional*): Precise number of tokens that should be sampled at each level. 
This is mostly useful for running dummy experiments offset (`int`, *optional*, defaults to 0): Audio offset used as conditioning, corresponds to the starting sample in the music. If the offset is greater than 0, the lyrics will be shifted take that intoaccount save_results (`bool`, *optional*, defaults to `True`): Whether or not to save the intermediate results. If `True`, will generate a folder named with the start time. sample_length (`int`, *optional*): Desired length of the generation in samples. Returns: torch.Tensor Example: ```python >>> from transformers import AutoTokenizer, JukeboxModel, set_seed >>> import torch >>> metas = dict(artist="Zac Brown Band", genres="Country", lyrics="I met a traveller from an antique land") >>> tokenizer = AutoTokenizer.from_pretrained("openai/jukebox-1b-lyrics") >>> model = JukeboxModel.from_pretrained("openai/jukebox-1b-lyrics", min_duration=0).eval() >>> labels = tokenizer(**metas)["input_ids"] >>> set_seed(0) >>> zs = [torch.zeros(1, 0, dtype=torch.long) for _ in range(3)] >>> zs = model._sample(zs, labels, [0], sample_length=40 * model.priors[0].raw_to_tokens, save_results=False) >>> zs[0] tensor([[1853, 1369, 1150, 1869, 1379, 1789, 519, 710, 1306, 1100, 1229, 519, 353, 1306, 1379, 1053, 519, 653, 1631, 1467, 1229, 1229, 10, 1647, 1254, 1229, 1306, 1528, 1789, 216, 1631, 1434, 653, 475, 1150, 1528, 1804, 541, 1804, 1434]]) ``` """ top_prior = self.priors[0] if sample_length is not None: total_length = sample_length else: total_length = ( int(sample_length_in_seconds * self.config.sampling_rate) // top_prior.raw_to_tokens ) * top_prior.raw_to_tokens if sample_levels is None: sample_levels = range(len(self.priors)) # total length of the signal, might be bit different from the actual generated length self.total_length = total_length for level in sample_levels: sampling_kwargs = { "temp": 0.99 if level == len(self.priors) - 1 else sampling_temperature, "chunk_size": chunk_size, "sample_tokens": sample_tokens, } # Set 
correct total_length, hop_length, labels and sampling_kwargs for level total_token_to_sample = total_length // self.priors[level].raw_to_tokens hop_length = int(self.config.hop_fraction[level] * self.priors[level].n_ctx) max_batch_size = lower_batch_size if level != sample_levels else max_batch_size music_tokens = self.sample_level( music_tokens, labels[level], offset, sampling_kwargs, level, total_token_to_sample, hop_length, max_batch_size, ) if save_results: self.vqvae.to(music_tokens[level].device) # Decode sample with torch.no_grad(): start_level = len(self.priors) - level - 1 # vqvae levels are reversed raw_audio = self.vqvae.decode( music_tokens[: level + 1], start_level=start_level, bs_chunks=music_tokens[level].shape[0] ) logdir = f"jukebox/level_{level}" if not os.path.exists(logdir): os.makedirs(logdir) save_temp_audio(logdir, level, metas=metas, aud=raw_audio.float()) if compute_alignments and self.priors[0] is not None and self.priors[0].nb_relevant_lyric_tokens > 0: with torch.no_grad(): alignments = get_alignment(music_tokens, labels[0], self.priors[0], self.config) torch.save({"alignments": alignments}, f"{logdir}/lyric_alignments.pt") return music_tokens @add_start_docstrings( """ Generates music tokens based on the provided `labels. Will start at the desired prior level and automatically upsample the sequence. If you want to create the audio, you should call `model.decode(tokens)`, which will use the VQ-VAE decoder to convert the music tokens to raw audio. Args: labels (`List[torch.LongTensor]`) : List of length `n_sample`, and shape `(self.levels, 4 + self.config.max_nb_genre + lyric_sequence_length)` metadata such as `artist_id`, `genre_id` and the full list of lyric tokens which are used to condition the generation. n_samples (`int`, *optional*, default to 1) : Number of samples to be generated in parallel. 
""", ) def ancestral_sample(self, labels, n_samples=1, **sampling_kwargs) -> List[torch.LongTensor]: """ Example: ```python >>> from transformers import AutoTokenizer, JukeboxModel, set_seed >>> model = JukeboxModel.from_pretrained("openai/jukebox-1b-lyrics", min_duration=0).eval() >>> tokenizer = AutoTokenizer.from_pretrained("openai/jukebox-1b-lyrics") >>> lyrics = "Hey, are you awake? Can you talk to me?" >>> artist = "Zac Brown Band" >>> genre = "Country" >>> metas = tokenizer(artist=artist, genres=genre, lyrics=lyrics) >>> set_seed(0) >>> music_tokens = model.ancestral_sample(metas.input_ids, sample_length=400) >>> with torch.no_grad(): ... model.decode(music_tokens)[:, :10].squeeze(-1) tensor([[-0.0219, -0.0679, -0.1050, -0.1203, -0.1271, -0.0936, -0.0396, -0.0405, -0.0818, -0.0697]]) ``` """ sample_levels = sampling_kwargs.pop("sample_levels", list(range(len(self.priors)))) music_tokens = [ torch.zeros(n_samples, 0, dtype=torch.long, device=labels[0].device) for _ in range(len(self.priors)) ] music_tokens = self._sample(music_tokens, labels, sample_levels, **sampling_kwargs) return music_tokens @add_start_docstrings( """Generates a continuation of the previously generated tokens. Args: music_tokens (`List[torch.LongTensor]` of length `self.levels` ) : A sequence of music tokens which will be used as context to continue the sampling process. Should have `self.levels` tensors, each corresponding to the generation at a certain level. """, JUKEBOX_SAMPLING_INPUT_DOCSTRING, ) def continue_sample(self, music_tokens, labels, **sampling_kwargs) -> List[torch.LongTensor]: sample_levels = sampling_kwargs.pop("sample_levels", list(range(len(self.priors)))) music_tokens = self._sample(music_tokens, labels, sample_levels, **sampling_kwargs) return music_tokens @add_start_docstrings( """Upsamples a sequence of music tokens using the prior at level `level`. 
Args: music_tokens (`List[torch.LongTensor]` of length `self.levels` ) : A sequence of music tokens which will be used as context to continue the sampling process. Should have `self.levels` tensors, each corresponding to the generation at a certain level. """, JUKEBOX_SAMPLING_INPUT_DOCSTRING, ) def upsample(self, music_tokens, labels, **sampling_kwargs) -> List[torch.LongTensor]: sample_levels = sampling_kwargs.pop("sample_levels", list(range(len(self.priors) - 1))) music_tokens = self._sample(music_tokens, labels, sample_levels, **sampling_kwargs) return music_tokens @add_start_docstrings( """Generate a raw audio conditioned on the provided `raw_audio` which is used as conditioning at each of the generation levels. The audio is encoded to music tokens using the 3 levels of the VQ-VAE. These tokens are used: as conditioning for each level, which means that no ancestral sampling is required. Args: raw_audio (`List[torch.Tensor]` of length `n_samples` ) : A list of raw audio that will be used as conditioning information for each samples that will be generated. """, JUKEBOX_SAMPLING_INPUT_DOCSTRING, ) def primed_sample(self, raw_audio, labels, **sampling_kwargs) -> List[torch.LongTensor]: sample_levels = sampling_kwargs.pop("sample_levels", list(range(len(self.priors)))) self.vqvae.to(raw_audio.device).float() with torch.no_grad(): music_tokens = self.vqvae.encode( raw_audio, start_level=0, end_level=len(self.priors), bs_chunks=raw_audio.shape[0] ) music_tokens = self._sample(music_tokens, labels, sample_levels, **sampling_kwargs) return music_tokens
27182812/ChatGLM-LLaMA-chinese-insturct
1,361
src/transformers/models/gpt_sw3/__init__.py
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_sentencepiece_available _import_structure = {} try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["tokenization_gpt_sw3"] = ["GPTSw3Tokenizer"] if TYPE_CHECKING: try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_gpt_sw3 import GPTSw3Tokenizer else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
8,156
src/transformers/models/gpt_sw3/convert_megatron_to_pytorch.py
# Copyright 2022 The HuggingFace Inc. team and the AI-Sweden team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Convert GPT-SW3 megatron checkpoints to pytorch""" import argparse import os from os.path import isfile import torch from transformers import GPT2Config def recursive_print(name, val, spaces=0): # Format the message. if name is None: msg = None else: fmt = "." * max(0, spaces - 2) + "# {:" + str(50 - spaces) + "s}" msg = fmt.format(name) # Print and recurse (if needed). if isinstance(val, dict): if msg is not None: print(msg) for k in val.keys(): recursive_print(k, val[k], spaces + 2) elif isinstance(val, torch.Tensor): print(msg, ":", val.size()) else: print(msg, ":", val) def fix_query_key_value_ordering(param, num_splits, num_heads, hidden_size): # Permutes layout of param tensor to [num_splits * num_heads * hidden_size, :] # for compatibility with later versions of NVIDIA Megatron-LM. # The inverse operation is performed inside Megatron-LM to read checkpoints: # https://github.com/NVIDIA/Megatron-LM/blob/v2.4/megatron/checkpointing.py#L209 # If param is the weight tensor of the self-attention block, the returned tensor # will have to be transposed one more time to be read by HuggingFace GPT2. 
input_shape = param.size() # other versions store [num_heads * num_splits * hidden_size, :] saved_shape = (num_heads, num_splits, hidden_size) + input_shape[1:] param = param.view(*saved_shape) param = param.transpose(0, 1).contiguous() param = param.view(*input_shape) return param def convert_megatron_checkpoint(sd_megatron, config): """ Converts a Megatron checkpoint to a HuggingFace GPT-SW3 checkpoint. """ n_positions = config.n_positions layers = config.n_layer vocab_size = config.vocab_size heads = config.n_head hidden_size_per_head = config.n_embd // config.n_head word_embeddings = sd_megatron["model.language_model.embedding.word_embeddings.weight"][:vocab_size, :] sd_hf = { "transformer.wte.weight": word_embeddings, "transformer.wpe.weight": sd_megatron["model.language_model.embedding.position_embeddings.weight"], "transformer.ln_f.weight": sd_megatron["model.language_model.encoder.final_layernorm.weight"], "transformer.ln_f.bias": sd_megatron["model.language_model.encoder.final_layernorm.bias"], } pf = "model.language_model.encoder.layers." 
for i in range(layers): causal_mask = torch.tril(torch.ones((n_positions, n_positions), dtype=torch.bool)) causal_mask = causal_mask.view(1, 1, n_positions, n_positions) sd_hf[f"transformer.h.{i}.attn.bias"] = causal_mask sd_hf[f"transformer.h.{i}.attn.masked_bias"] = torch.tensor(-1e4, dtype=torch.bfloat16) sd_hf[f"transformer.h.{i}.ln_1.weight"] = sd_megatron[f"{pf}{i}.input_layernorm.weight"] sd_hf[f"transformer.h.{i}.ln_1.bias"] = sd_megatron[f"{pf}{i}.input_layernorm.bias"] val1 = sd_megatron[f"{pf}{i}.self_attention.query_key_value.weight"] val1 = fix_query_key_value_ordering(val1, 3, heads, hidden_size_per_head) sd_hf[f"transformer.h.{i}.attn.c_attn.weight"] = val1.transpose(0, 1).contiguous() val2 = sd_megatron[f"{pf}{i}.self_attention.query_key_value.bias"] val2 = fix_query_key_value_ordering(val2, 3, heads, hidden_size_per_head) sd_hf[f"transformer.h.{i}.attn.c_attn.bias"] = val2 sd_hf[f"transformer.h.{i}.attn.c_proj.weight"] = sd_megatron[f"{pf}{i}.self_attention.dense.weight"].transpose( 0, 1 ) sd_hf[f"transformer.h.{i}.attn.c_proj.bias"] = sd_megatron[f"{pf}{i}.self_attention.dense.bias"] sd_hf[f"transformer.h.{i}.ln_2.weight"] = sd_megatron[f"{pf}{i}.post_attention_layernorm.weight"] sd_hf[f"transformer.h.{i}.ln_2.bias"] = sd_megatron[f"{pf}{i}.post_attention_layernorm.bias"] sd_hf[f"transformer.h.{i}.mlp.c_fc.weight"] = sd_megatron[f"{pf}{i}.mlp.dense_h_to_4h.weight"].transpose(0, 1) sd_hf[f"transformer.h.{i}.mlp.c_fc.bias"] = sd_megatron[f"{pf}{i}.mlp.dense_h_to_4h.bias"] sd_hf[f"transformer.h.{i}.mlp.c_proj.weight"] = sd_megatron[f"{pf}{i}.mlp.dense_4h_to_h.weight"].transpose( 0, 1 ) sd_hf[f"transformer.h.{i}.mlp.c_proj.bias"] = sd_megatron[f"{pf}{i}.mlp.dense_4h_to_h.bias"] # For LM head, transformers' wants the matrix to weight embeddings. 
sd_hf["lm_head.weight"] = word_embeddings return sd_hf def copy_config(config_hf, config_megatron): """Copy the config from Megatron to hf.""" config_hf.vocab_size = 64000 config_hf.n_positions = config_megatron["encoder_seq_length"] config_hf.n_embd = config_megatron["hidden_size"] config_hf.n_layer = config_megatron["num_layers"] config_hf.n_head = config_megatron["num_attention_heads"] config_hf.n_inner = config_megatron["ffn_hidden_size"] config_hf.activation_function = "gelu" config_hf.resid_pdrop = 0.1 config_hf.embd_pdrop = 0.1 config_hf.attn_pdrop = 0.1 config_hf.layer_norm_epsilon = config_megatron["layernorm_epsilon"] # 1e-5 config_hf.initializer_range = config_megatron["init_method_std"] # 0.02 config_hf.apply_query_key_layer_scaling = config_megatron["apply_query_key_layer_scaling"] # True config_hf.normalize_attention_scores = True config_hf.use_cache = True # This identifies the 6.7B (7B) model which uses a different tokenizer if config_megatron["hidden_size"] == 4096: config_hf.bos_token_id = 1 # <|endoftext|> config_hf.eos_token_id = 1 # <|endoftext|> config_hf.pad_token_id = 0 # <unk> else: config_hf.bos_token_id = 2 # <s> config_hf.eos_token_id = 3 # <|endoftext|> config_hf.pad_token_id = 0 # <pad> return config_hf def main(args): print(args) checkpoint_path = args.checkpoint_path save_path = args.save_path if isfile(checkpoint_path): raise FileNotFoundError(f"ERROR! could not find file {checkpoint_path}") # Load the model. checkpoint = torch.load(checkpoint_path, map_location="cpu") # Load the config. config_megatron = checkpoint["hyper_parameters"]["cfg"] config_hf = GPT2Config() config_hf = copy_config(config_hf=config_hf, config_megatron=config_megatron) config_hf.architectures = ["GPT2LMHeadModel"] sd_megatron = checkpoint["state_dict"] # Convert. print("Converting") sd_hf = convert_megatron_checkpoint(sd_megatron, config_hf) # Print the structure of converted state dict. 
if args.print_checkpoint_structure: recursive_print(None, sd_hf) config_hf.tokenizer_class = "GPTSw3Tokenizer" # Store the config to file. print("Saving config") config_hf.save_pretrained(save_path) # Store the state_dict to file. output_checkpoint_file = os.path.join(save_path, "pytorch_model.bin") print(f'Saving checkpoint to "{output_checkpoint_file}"') torch.save(sd_hf, output_checkpoint_file) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument( "--checkpoint_path", type=str, required=True, help="e.g. megatron_gpt--val_loss=2.42-step=38000-consumed_samples=54720000", ) parser.add_argument("--save_path", type=str, required=True, help="e.g. /home/user/gpt-sw3/hf") parser.add_argument("--print-checkpoint-structure", action="store_true") _args = parser.parse_args() main(_args)
27182812/ChatGLM-LLaMA-chinese-insturct
13,156
src/transformers/models/gpt_sw3/tokenization_gpt_sw3.py
import os import re import unicodedata from ... import is_torch_available if is_torch_available(): import torch from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple, Union import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import logging logger = logging.get_logger(__name__) VOCAB_FILES_NAMES = {"vocab_file": "spiece.model"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "AI-Sweden/gpt-sw3-126m": "https://huggingface.co/AI-Sweden/gpt-sw3-126m/resolve/main/spiece.model", "AI-Sweden/gpt-sw3-350m": "https://huggingface.co/AI-Sweden/gpt-sw3-350m/resolve/main/spiece.model", "AI-Sweden/gpt-sw3-1.6b": "https://huggingface.co/AI-Sweden/gpt-sw3-1.6b/resolve/main/spiece.model", "AI-Sweden/gpt-sw3-6.7b": "https://huggingface.co/AI-Sweden/gpt-sw3-6.7b/resolve/main/spiece.model", "AI-Sweden/gpt-sw3-20b": "https://huggingface.co/AI-Sweden/gpt-sw3-20b/resolve/main/spiece.model", } } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "AI-Sweden/gpt-sw3-126m": 2048, "AI-Sweden/gpt-sw3-350m": 2048, "AI-Sweden/gpt-sw3-1.6b": 2048, "AI-Sweden/gpt-sw3-6.7b": 2048, "AI-Sweden/gpt-sw3-20b": 2048, } class GPTSw3Tokenizer(PreTrainedTokenizer): """ Construct an GPTSw3 tokenizer. Based on [SentencePiece](https://github.com/google/sentencepiece). This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Example usage: ``` >>> from transformers import GPTSw3Tokenizer >>> tokenizer = GPTSw3Tokenizer.from_pretrained("AI-Sweden/gpt-sw3-126m") >>> tokenizer("Svenska är kul!")['input_ids'] [1814, 377, 3617, 63504] ``` Args: vocab_file (`str`): [SentencePiece](https://github.com/google/sentencepiece) file (generally has a *.spm* extension) that contains the vocabulary necessary to instantiate a tokenizer. do_lower_case (`bool`, *optional*, defaults to `False`): Whether or not to lowercase the input when tokenizing. 
remove_space (`bool`, *optional*, defaults to `False`): Whether or not to strip the text when tokenizing (removing excess spaces before and after the string). keep_accents (`bool`, *optional*, defaults to `False`): Whether or not to keep accents when tokenizing. bos_token (`str`, *optional*): The beginning of sequence token that can be used for downstream task, was not seen during pretraining. If not provided, will default to '<s>' or '<|endoftext|>', depending on model size. eos_token (`str`, *optional*): The end of sequence token seen during pretraining. If not provided, will default to '<|endoftext|>' unk_token (`str`, *optional*): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. If not provided, will default to '<unk>'. pad_token (`str`, *optional*): The token used for padding, for example when batching sequences of different lengths. If not provided, will default to '<pad>' or '<unk>' depending on model size. sp_model_kwargs (`dict`, *optional*): Will be passed to the `SentencePieceProcessor.__init__()` method. The [Python wrapper for SentencePiece](https://github.com/google/sentencepiece/tree/master/python) can be used, among other things, to set: - `enable_sampling`: Enable subword regularization. - `nbest_size`: Sampling parameters for unigram. Invalid for BPE-Dropout. - `nbest_size = {0,1}`: No sampling is performed. - `nbest_size > 1`: samples from the nbest_size results. - `nbest_size < 0`: assuming that nbest_size is infinite and samples from the all hypothesis (lattice) using forward-filtering-and-backward-sampling algorithm. - `alpha`: Smoothing parameter for unigram sampling, and dropout probability of merge operations for BPE-dropout. Attributes: sp_model (`SentencePieceProcessor`): The *SentencePiece* processor that is used for every conversion (string, tokens and IDs). whitespaces (`set`): The whitespaces that are replaced in the whitespace normalization in preprocessing. 
non_printing_characters_re (`Pattern`): The compiled regular expression to remove non-printing characters in preprocessing. """ vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES def __init__( self, vocab_file, do_lower_case=False, remove_space=False, keep_accents=False, pad_token=None, unk_token=None, eos_token=None, bos_token=None, sp_model_kwargs: Optional[Dict[str, Any]] = None, **kwargs, ) -> None: self.sp_model_kwargs = {} if sp_model_kwargs is None else sp_model_kwargs name_or_path = kwargs.get("name_or_path") if name_or_path is None: logger.warning( "name_or_path not provided, will work for all GPTSw3 models except gpt-sw3-7b," " you are testing the model, this can safely be ignored" ) name_or_path = "None" # Default definitions for our 2 tokenizer versions, with None-checks to enable proper testing eos_token = "<|endoftext|>" if eos_token is None else eos_token unk_token = "<unk>" if unk_token is None else unk_token if "gpt-sw3-7b" in name_or_path: pad_token = unk_token if pad_token is None else pad_token bos_token = eos_token if bos_token is None else bos_token else: pad_token = "<pad>" if pad_token is None else pad_token bos_token = "<s>" if bos_token is None else bos_token super().__init__( do_lower_case=do_lower_case, remove_space=remove_space, keep_accents=keep_accents, bos_token=bos_token, eos_token=eos_token, unk_token=unk_token, pad_token=pad_token, sp_model_kwargs=self.sp_model_kwargs, **kwargs, ) self.do_lower_case = do_lower_case self.remove_space = remove_space self.keep_accents = keep_accents self.vocab_file = vocab_file self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs) self.sp_model.Load(vocab_file) # Used for whitespace normalization in input texts # fmt : off self.whitespaces = {" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", "", "„"} # fmt : on # Regular expression to remove non-printing characters (e.g. 
some unicode control chars) in preprocessing self.non_printing_characters_re = re.compile( f"[{''.join(map(chr, list(range(0, 9)) + list(range(11, 32)) + list(range(127, 160)) + [160, 173, 8203]))}]" ) # Copied from transformers.models.albert.tokenization_albert.AlbertTokenizer.__getstate__ def __getstate__(self): state = self.__dict__.copy() state["sp_model"] = None return state # Copied from transformers.models.albert.tokenization_albert.AlbertTokenizer.__setstate__ def __setstate__(self, d): self.__dict__ = d # for backward compatibility if not hasattr(self, "sp_model_kwargs"): self.sp_model_kwargs = {} self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs) self.sp_model.Load(self.vocab_file) @property # Copied from transformers.models.albert.tokenization_albert.AlbertTokenizer.vocab_size def vocab_size(self) -> int: return len(self.sp_model) def preprocess_text(self, text: str) -> str: """ Returns the preprocessed text. This procedure is identical to what was used when training the tokenizer. 
""" # Remove non-printing characters text = self.non_printing_characters_re.sub("", text) # Normalize whitespaces text = "".join([char if char not in self.whitespaces else " " for char in text]) # NFC Unicode normalization text = unicodedata.normalize("NFC", text) return text def _tokenize(self, text: str, **kwargs) -> List[str]: text = self.preprocess_text(text) return self.sp_model.encode(text, out_type=str) def _convert_token_to_id(self, token: str) -> int: """Converts a token (str) to an id (int) using the vocab.""" return self.sp_model.PieceToId(token) def _convert_id_to_token(self, index: int) -> str: """Converts an index (int) to a token (str) using the vocab.""" return self.sp_model.IdToPiece(index) @staticmethod def clean_up_tokenization(out_string: str) -> str: """Returns the input string, this function is overridden to remove the default clean up.""" return out_string def convert_tokens_to_string(self, tokens: List[str]) -> str: """Converts a sequence of tokens (strings) to a single string. 
Special tokens remain intact.""" current_sub_tokens = [] out_string = "" prev_is_special = False for token in tokens: # make sure that special tokens are not decoded using sentencepiece model if token in self.all_special_tokens: if not prev_is_special: out_string += " " out_string += self.sp_model.decode(current_sub_tokens) + token prev_is_special = True current_sub_tokens = [] else: current_sub_tokens.append(token) prev_is_special = False out_string += self.sp_model.decode(current_sub_tokens) return out_string # Copied from transformers.models.albert.tokenization_albert.AlbertTokenizer.get_vocab def get_vocab(self) -> Dict[str, int]: vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)} vocab.update(self.added_tokens_encoder) return vocab # Copied from transformers.models.albert.tokenization_albert.AlbertTokenizer.save_vocabulary def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: if not os.path.isdir(save_directory): logger.error(f"Vocabulary path ({save_directory}) should be a directory") return out_vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file) and os.path.isfile(self.vocab_file): copyfile(self.vocab_file, out_vocab_file) elif not os.path.isfile(self.vocab_file): with open(out_vocab_file, "wb") as fi: content_spiece_model = self.sp_model.serialized_model_proto() fi.write(content_spiece_model) return (out_vocab_file,) def encode_fast( self, text: Union[str, List[str]], return_tensors: Union[str, bool] = False ) -> Union[List[int], List[List[int]], "torch.Tensor"]: """ Encodes a text or batch of texts to token ids using preprocessing and the raw SP tokenizer. This has reduced functionality but is often much faster. Does NOT handle special tokens correctly, these can manually be added as ids afterwards. 
Does NOT support padding, these can manually be added as ids afterwards. Use default HuggingFace tokenization methods for full functionality. Args: text (`str` or `List[str]`): One or several text(s) to convert to token ids. return_tensors (`str` or `bool`): Returns PyTorch tensors if set to True or "pt" Returns: `List[int]`, `List[List[int]]`, or `torch.Tensor`: The encoded text(s) as token ids. """ if isinstance(text, str): text = self.preprocess_text(text) token_ids = self.sp_model.encode(text) else: text = [self.preprocess_text(t) for t in text] token_ids = self.sp_model.encode(text) if return_tensors is True or return_tensors == "pt": token_ids = torch.tensor(token_ids) return token_ids def decode_fast(self, token_ids: Union[int, List[int]]) -> str: """ Encodes a text or batch of texts to token ids using preprocessing and the raw SP tokenizer. This has reduced functionality but is often much faster. Args: token_ids (`int` or `List[int]`): Encoded token or text as token id(s). Returns: `str`: Decoded text """ return self.sp_model.decode(token_ids)
27182812/ChatGLM-LLaMA-chinese-insturct
4,288
src/transformers/models/xlnet/__init__.py
# Copyright 2020 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available, ) _import_structure = {"configuration_xlnet": ["XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "XLNetConfig"]} try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["tokenization_xlnet"] = ["XLNetTokenizer"] try: if not is_tokenizers_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["tokenization_xlnet_fast"] = ["XLNetTokenizerFast"] try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_xlnet"] = [ "XLNET_PRETRAINED_MODEL_ARCHIVE_LIST", "XLNetForMultipleChoice", "XLNetForQuestionAnswering", "XLNetForQuestionAnsweringSimple", "XLNetForSequenceClassification", "XLNetForTokenClassification", "XLNetLMHeadModel", "XLNetModel", "XLNetPreTrainedModel", "load_tf_weights_in_xlnet", ] try: if not is_tf_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_tf_xlnet"] = [ "TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST", "TFXLNetForMultipleChoice", "TFXLNetForQuestionAnsweringSimple", 
"TFXLNetForSequenceClassification", "TFXLNetForTokenClassification", "TFXLNetLMHeadModel", "TFXLNetMainLayer", "TFXLNetModel", "TFXLNetPreTrainedModel", ] if TYPE_CHECKING: from .configuration_xlnet import XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP, XLNetConfig try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_xlnet import XLNetTokenizer try: if not is_tokenizers_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_xlnet_fast import XLNetTokenizerFast try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_xlnet import ( XLNET_PRETRAINED_MODEL_ARCHIVE_LIST, XLNetForMultipleChoice, XLNetForQuestionAnswering, XLNetForQuestionAnsweringSimple, XLNetForSequenceClassification, XLNetForTokenClassification, XLNetLMHeadModel, XLNetModel, XLNetPreTrainedModel, load_tf_weights_in_xlnet, ) try: if not is_tf_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_tf_xlnet import ( TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST, TFXLNetForMultipleChoice, TFXLNetForQuestionAnsweringSimple, TFXLNetForSequenceClassification, TFXLNetForTokenClassification, TFXLNetLMHeadModel, TFXLNetMainLayer, TFXLNetModel, TFXLNetPreTrainedModel, ) else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
3,688
src/transformers/models/xlnet/convert_xlnet_original_tf_checkpoint_to_pytorch.py
# coding=utf-8 # Copyright 2018 The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Convert BERT checkpoint.""" import argparse import os import torch from transformers import ( XLNetConfig, XLNetForQuestionAnswering, XLNetForSequenceClassification, XLNetLMHeadModel, load_tf_weights_in_xlnet, ) from transformers.utils import CONFIG_NAME, WEIGHTS_NAME, logging GLUE_TASKS_NUM_LABELS = { "cola": 2, "mnli": 3, "mrpc": 2, "sst-2": 2, "sts-b": 1, "qqp": 2, "qnli": 2, "rte": 2, "wnli": 2, } logging.set_verbosity_info() def convert_xlnet_checkpoint_to_pytorch( tf_checkpoint_path, bert_config_file, pytorch_dump_folder_path, finetuning_task=None ): # Initialise PyTorch model config = XLNetConfig.from_json_file(bert_config_file) finetuning_task = finetuning_task.lower() if finetuning_task is not None else "" if finetuning_task in GLUE_TASKS_NUM_LABELS: print(f"Building PyTorch XLNetForSequenceClassification model from configuration: {config}") config.finetuning_task = finetuning_task config.num_labels = GLUE_TASKS_NUM_LABELS[finetuning_task] model = XLNetForSequenceClassification(config) elif "squad" in finetuning_task: config.finetuning_task = finetuning_task model = XLNetForQuestionAnswering(config) else: model = XLNetLMHeadModel(config) # Load weights from tf checkpoint load_tf_weights_in_xlnet(model, config, tf_checkpoint_path) # Save pytorch-model pytorch_weights_dump_path = os.path.join(pytorch_dump_folder_path, WEIGHTS_NAME) pytorch_config_dump_path = 
os.path.join(pytorch_dump_folder_path, CONFIG_NAME) print(f"Save PyTorch model to {os.path.abspath(pytorch_weights_dump_path)}") torch.save(model.state_dict(), pytorch_weights_dump_path) print(f"Save configuration file to {os.path.abspath(pytorch_config_dump_path)}") with open(pytorch_config_dump_path, "w", encoding="utf-8") as f: f.write(config.to_json_string()) if __name__ == "__main__": parser = argparse.ArgumentParser() # Required parameters parser.add_argument( "--tf_checkpoint_path", default=None, type=str, required=True, help="Path to the TensorFlow checkpoint path." ) parser.add_argument( "--xlnet_config_file", default=None, type=str, required=True, help=( "The config json file corresponding to the pre-trained XLNet model. \n" "This specifies the model architecture." ), ) parser.add_argument( "--pytorch_dump_folder_path", default=None, type=str, required=True, help="Path to the folder to store the PyTorch model or dataset/vocab.", ) parser.add_argument( "--finetuning_task", default=None, type=str, help="Name of a task on which the XLNet TensorFlow model was fine-tuned", ) args = parser.parse_args() print(args) convert_xlnet_checkpoint_to_pytorch( args.tf_checkpoint_path, args.xlnet_config_file, args.pytorch_dump_folder_path, args.finetuning_task )
27182812/ChatGLM-LLaMA-chinese-insturct
10,024
src/transformers/models/xlnet/tokenization_xlnet_fast.py
# coding=utf-8 # Copyright 2018 Google AI, Google Brain and Carnegie Mellon University Authors and the HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Tokenization classes for XLNet model.""" import os from shutil import copyfile from typing import List, Optional, Tuple from ...tokenization_utils import AddedToken from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import is_sentencepiece_available, logging if is_sentencepiece_available(): from .tokenization_xlnet import XLNetTokenizer else: XLNetTokenizer = None logger = logging.get_logger(__name__) VOCAB_FILES_NAMES = {"vocab_file": "spiece.model", "tokenizer_file": "tokenizer.json"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "xlnet-base-cased": "https://huggingface.co/xlnet-base-cased/resolve/main/spiece.model", "xlnet-large-cased": "https://huggingface.co/xlnet-large-cased/resolve/main/spiece.model", }, "tokenizer_file": { "xlnet-base-cased": "https://huggingface.co/xlnet-base-cased/resolve/main/tokenizer.json", "xlnet-large-cased": "https://huggingface.co/xlnet-large-cased/resolve/main/tokenizer.json", }, } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "xlnet-base-cased": None, "xlnet-large-cased": None, } SPIECE_UNDERLINE = "▁" # Segments (not really needed) SEG_ID_A = 0 SEG_ID_B = 1 SEG_ID_CLS = 2 SEG_ID_SEP = 3 SEG_ID_PAD = 4 class XLNetTokenizerFast(PreTrainedTokenizerFast): """ Construct a "fast" XLNet tokenizer (backed by HuggingFace's *tokenizers* library). 
Based on [Unigram](https://huggingface.co/docs/tokenizers/python/latest/components.html?highlight=unigram#models). This tokenizer inherits from [`PreTrainedTokenizerFast`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: vocab_file (`str`): [SentencePiece](https://github.com/google/sentencepiece) file (generally has a .spm extension) that contains the vocabulary necessary to instantiate a tokenizer. do_lower_case (`bool`, *optional*, defaults to `True`): Whether to lowercase the input when tokenizing. remove_space (`bool`, *optional*, defaults to `True`): Whether to strip the text when tokenizing (removing excess spaces before and after the string). keep_accents (`bool`, *optional*, defaults to `False`): Whether to keep accents when tokenizing. bos_token (`str`, *optional*, defaults to `"<s>"`): The beginning of sequence token that was used during pretraining. Can be used a sequence classifier token. <Tip> When building a sequence using special tokens, this is not the token that is used for the beginning of sequence. The token used is the `cls_token`. </Tip> eos_token (`str`, *optional*, defaults to `"</s>"`): The end of sequence token. <Tip> When building a sequence using special tokens, this is not the token that is used for the end of sequence. The token used is the `sep_token`. </Tip> unk_token (`str`, *optional*, defaults to `"<unk>"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. sep_token (`str`, *optional*, defaults to `"<sep>"`): The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for sequence classification or for a text and a question for question answering. It is also used as the last token of a sequence built with special tokens. 
pad_token (`str`, *optional*, defaults to `"<pad>"`): The token used for padding, for example when batching sequences of different lengths. cls_token (`str`, *optional*, defaults to `"<cls>"`): The classifier token which is used when doing sequence classification (classification of the whole sequence instead of per-token classification). It is the first token of the sequence when built with special tokens. mask_token (`str`, *optional*, defaults to `"<mask>"`): The token used for masking values. This is the token used when training this model with masked language modeling. This is the token which the model will try to predict. additional_special_tokens (`List[str]`, *optional*, defaults to `["<eop>", "<eod>"]`): Additional special tokens used by the tokenizer. Attributes: sp_model (`SentencePieceProcessor`): The *SentencePiece* processor that is used for every conversion (string, tokens and IDs). """ vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES padding_side = "left" slow_tokenizer_class = XLNetTokenizer def __init__( self, vocab_file=None, tokenizer_file=None, do_lower_case=False, remove_space=True, keep_accents=False, bos_token="<s>", eos_token="</s>", unk_token="<unk>", sep_token="<sep>", pad_token="<pad>", cls_token="<cls>", mask_token="<mask>", additional_special_tokens=["<eop>", "<eod>"], **kwargs, ): # Mask token behave like a normal word, i.e. 
include the space before it mask_token = AddedToken(mask_token, lstrip=True, rstrip=False) if isinstance(mask_token, str) else mask_token super().__init__( vocab_file=vocab_file, tokenizer_file=tokenizer_file, do_lower_case=do_lower_case, remove_space=remove_space, keep_accents=keep_accents, bos_token=bos_token, eos_token=eos_token, unk_token=unk_token, sep_token=sep_token, pad_token=pad_token, cls_token=cls_token, mask_token=mask_token, additional_special_tokens=additional_special_tokens, **kwargs, ) self._pad_token_type_id = 3 self.do_lower_case = do_lower_case self.remove_space = remove_space self.keep_accents = keep_accents self.vocab_file = vocab_file self.can_save_slow_tokenizer = False if not self.vocab_file else True def build_inputs_with_special_tokens( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. An XLNet sequence has the following format: - single sequence: `X <sep> <cls>` - pair of sequences: `A <sep> B <sep> <cls>` Args: token_ids_0 (`List[int]`): List of IDs to which the special tokens will be added. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. """ sep = [self.sep_token_id] cls = [self.cls_token_id] if token_ids_1 is None: return token_ids_0 + sep + cls return token_ids_0 + sep + token_ids_1 + sep + cls def create_token_type_ids_from_sequences( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Create a mask from the two sequences passed to be used in a sequence-pair classification task. 
An XLNet sequence pair mask has the following format: ``` 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 | first sequence | second sequence | ``` If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s). Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s). """ sep = [self.sep_token_id] cls_segment_id = [2] if token_ids_1 is None: return len(token_ids_0 + sep) * [0] + cls_segment_id return len(token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1] + cls_segment_id def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: if not self.can_save_slow_tokenizer: raise ValueError( "Your fast tokenizer does not have the necessary information to save the vocabulary for a slow " "tokenizer." ) if not os.path.isdir(save_directory): logger.error(f"Vocabulary path ({save_directory}) should be a directory") return out_vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file): copyfile(self.vocab_file, out_vocab_file) return (out_vocab_file,)
27182812/ChatGLM-LLaMA-chinese-insturct
15,992
src/transformers/models/xlnet/tokenization_xlnet.py
# coding=utf-8 # Copyright 2018 Google AI, Google Brain and Carnegie Mellon University Authors and the HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Tokenization classes for XLNet model.""" import os import unicodedata from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import SPIECE_UNDERLINE, logging logger = logging.get_logger(__name__) VOCAB_FILES_NAMES = {"vocab_file": "spiece.model"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "xlnet-base-cased": "https://huggingface.co/xlnet-base-cased/resolve/main/spiece.model", "xlnet-large-cased": "https://huggingface.co/xlnet-large-cased/resolve/main/spiece.model", } } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "xlnet-base-cased": None, "xlnet-large-cased": None, } # Segments (not really needed) SEG_ID_A = 0 SEG_ID_B = 1 SEG_ID_CLS = 2 SEG_ID_SEP = 3 SEG_ID_PAD = 4 class XLNetTokenizer(PreTrainedTokenizer): """ Construct an XLNet tokenizer. Based on [SentencePiece](https://github.com/google/sentencepiece). This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: vocab_file (`str`): [SentencePiece](https://github.com/google/sentencepiece) file (generally has a .spm extension) that contains the vocabulary necessary to instantiate a tokenizer. 
do_lower_case (`bool`, *optional*, defaults to `True`): Whether to lowercase the input when tokenizing. remove_space (`bool`, *optional*, defaults to `True`): Whether to strip the text when tokenizing (removing excess spaces before and after the string). keep_accents (`bool`, *optional*, defaults to `False`): Whether to keep accents when tokenizing. bos_token (`str`, *optional*, defaults to `"<s>"`): The beginning of sequence token that was used during pretraining. Can be used a sequence classifier token. <Tip> When building a sequence using special tokens, this is not the token that is used for the beginning of sequence. The token used is the `cls_token`. </Tip> eos_token (`str`, *optional*, defaults to `"</s>"`): The end of sequence token. <Tip> When building a sequence using special tokens, this is not the token that is used for the end of sequence. The token used is the `sep_token`. </Tip> unk_token (`str`, *optional*, defaults to `"<unk>"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. sep_token (`str`, *optional*, defaults to `"<sep>"`): The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for sequence classification or for a text and a question for question answering. It is also used as the last token of a sequence built with special tokens. pad_token (`str`, *optional*, defaults to `"<pad>"`): The token used for padding, for example when batching sequences of different lengths. cls_token (`str`, *optional*, defaults to `"<cls>"`): The classifier token which is used when doing sequence classification (classification of the whole sequence instead of per-token classification). It is the first token of the sequence when built with special tokens. mask_token (`str`, *optional*, defaults to `"<mask>"`): The token used for masking values. This is the token used when training this model with masked language modeling. 
This is the token which the model will try to predict. additional_special_tokens (`List[str]`, *optional*, defaults to `["<eop>", "<eod>"]`): Additional special tokens used by the tokenizer. sp_model_kwargs (`dict`, *optional*): Will be passed to the `SentencePieceProcessor.__init__()` method. The [Python wrapper for SentencePiece](https://github.com/google/sentencepiece/tree/master/python) can be used, among other things, to set: - `enable_sampling`: Enable subword regularization. - `nbest_size`: Sampling parameters for unigram. Invalid for BPE-Dropout. - `nbest_size = {0,1}`: No sampling is performed. - `nbest_size > 1`: samples from the nbest_size results. - `nbest_size < 0`: assuming that nbest_size is infinite and samples from the all hypothesis (lattice) using forward-filtering-and-backward-sampling algorithm. - `alpha`: Smoothing parameter for unigram sampling, and dropout probability of merge operations for BPE-dropout. Attributes: sp_model (`SentencePieceProcessor`): The *SentencePiece* processor that is used for every conversion (string, tokens and IDs). """ vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES padding_side = "left" def __init__( self, vocab_file, do_lower_case=False, remove_space=True, keep_accents=False, bos_token="<s>", eos_token="</s>", unk_token="<unk>", sep_token="<sep>", pad_token="<pad>", cls_token="<cls>", mask_token="<mask>", additional_special_tokens=["<eop>", "<eod>"], sp_model_kwargs: Optional[Dict[str, Any]] = None, **kwargs, ) -> None: # Mask token behave like a normal word, i.e. 
include the space before it mask_token = AddedToken(mask_token, lstrip=True, rstrip=False) if isinstance(mask_token, str) else mask_token self.sp_model_kwargs = {} if sp_model_kwargs is None else sp_model_kwargs super().__init__( do_lower_case=do_lower_case, remove_space=remove_space, keep_accents=keep_accents, bos_token=bos_token, eos_token=eos_token, unk_token=unk_token, sep_token=sep_token, pad_token=pad_token, cls_token=cls_token, mask_token=mask_token, additional_special_tokens=additional_special_tokens, sp_model_kwargs=self.sp_model_kwargs, **kwargs, ) self._pad_token_type_id = 3 self.do_lower_case = do_lower_case self.remove_space = remove_space self.keep_accents = keep_accents self.vocab_file = vocab_file self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs) self.sp_model.Load(vocab_file) @property def vocab_size(self): return len(self.sp_model) def get_vocab(self): vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)} vocab.update(self.added_tokens_encoder) return vocab def __getstate__(self): state = self.__dict__.copy() state["sp_model"] = None return state def __setstate__(self, d): self.__dict__ = d # for backward compatibility if not hasattr(self, "sp_model_kwargs"): self.sp_model_kwargs = {} self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs) self.sp_model.Load(self.vocab_file) def preprocess_text(self, inputs): if self.remove_space: outputs = " ".join(inputs.strip().split()) else: outputs = inputs outputs = outputs.replace("``", '"').replace("''", '"') if not self.keep_accents: outputs = unicodedata.normalize("NFKD", outputs) outputs = "".join([c for c in outputs if not unicodedata.combining(c)]) if self.do_lower_case: outputs = outputs.lower() return outputs def _tokenize(self, text: str) -> List[str]: """Tokenize a string.""" text = self.preprocess_text(text) pieces = self.sp_model.encode(text, out_type=str) new_pieces = [] for piece in pieces: if len(piece) > 1 and piece[-1] == str(",") and 
piece[-2].isdigit(): cur_pieces = self.sp_model.EncodeAsPieces(piece[:-1].replace(SPIECE_UNDERLINE, "")) if piece[0] != SPIECE_UNDERLINE and cur_pieces[0][0] == SPIECE_UNDERLINE: if len(cur_pieces[0]) == 1: cur_pieces = cur_pieces[1:] else: cur_pieces[0] = cur_pieces[0][1:] cur_pieces.append(piece[-1]) new_pieces.extend(cur_pieces) else: new_pieces.append(piece) return new_pieces def _convert_token_to_id(self, token): """Converts a token (str) in an id using the vocab.""" return self.sp_model.PieceToId(token) def _convert_id_to_token(self, index): """Converts an index (integer) in a token (str) using the vocab.""" return self.sp_model.IdToPiece(index) def convert_tokens_to_string(self, tokens): """Converts a sequence of tokens (strings for sub-words) in a single string.""" out_string = "".join(tokens).replace(SPIECE_UNDERLINE, " ").strip() return out_string def _decode( self, token_ids: List[int], skip_special_tokens: bool = False, clean_up_tokenization_spaces: bool = True, spaces_between_special_tokens: bool = True, **kwargs, ) -> str: self._decode_use_source_tokenizer = kwargs.pop("use_source_tokenizer", False) filtered_tokens = self.convert_ids_to_tokens(token_ids, skip_special_tokens=skip_special_tokens) # To avoid mixing byte-level and unicode for byte-level BPT # we need to build string separately for added tokens and byte-level tokens # cf. 
https://github.com/huggingface/transformers/issues/1133 sub_texts = [] current_sub_text = [] for token in filtered_tokens: if skip_special_tokens and token in self.all_special_ids: continue if token in self.added_tokens_encoder: if current_sub_text: sub_texts.append(self.convert_tokens_to_string(current_sub_text)) current_sub_text = [] sub_texts.append(token) else: current_sub_text.append(token) if current_sub_text: sub_texts.append(self.convert_tokens_to_string(current_sub_text)) # Mimic the behavior of the Rust tokenizer: # By default, there are no spaces between special tokens text = "".join(sub_texts) if clean_up_tokenization_spaces: clean_text = self.clean_up_tokenization(text) return clean_text else: return text def build_inputs_with_special_tokens( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. An XLNet sequence has the following format: - single sequence: `X <sep> <cls>` - pair of sequences: `A <sep> B <sep> <cls>` Args: token_ids_0 (`List[int]`): List of IDs to which the special tokens will be added. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. """ sep = [self.sep_token_id] cls = [self.cls_token_id] if token_ids_1 is None: return token_ids_0 + sep + cls return token_ids_0 + sep + token_ids_1 + sep + cls def get_special_tokens_mask( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False ) -> List[int]: """ Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer `prepare_for_model` method. Args: token_ids_0 (`List[int]`): List of IDs. 
token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. already_has_special_tokens (`bool`, *optional*, defaults to `False`): Whether or not the token list is already formatted with special tokens for the model. Returns: `List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token. """ if already_has_special_tokens: return super().get_special_tokens_mask( token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True ) if token_ids_1 is not None: return ([0] * len(token_ids_0)) + [1] + ([0] * len(token_ids_1)) + [1, 1] return ([0] * len(token_ids_0)) + [1, 1] def create_token_type_ids_from_sequences( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Create a mask from the two sequences passed to be used in a sequence-pair classification task. An XLNet sequence pair mask has the following format: ``` 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 | first sequence | second sequence | ``` If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s). Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s). 
""" sep = [self.sep_token_id] cls_segment_id = [2] if token_ids_1 is None: return len(token_ids_0 + sep) * [0] + cls_segment_id return len(token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1] + cls_segment_id def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: if not os.path.isdir(save_directory): logger.error(f"Vocabulary path ({save_directory}) should be a directory") return out_vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file) and os.path.isfile(self.vocab_file): copyfile(self.vocab_file, out_vocab_file) elif not os.path.isfile(self.vocab_file): with open(out_vocab_file, "wb") as fi: content_spiece_model = self.sp_model.serialized_model_proto() fi.write(content_spiece_model) return (out_vocab_file,)
27182812/ChatGLM-LLaMA-chinese-insturct
93,009
src/transformers/models/xlnet/modeling_xlnet.py
# coding=utf-8 # Copyright 2018 Google AI, Google Brain and Carnegie Mellon University Authors and the HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch XLNet model. """ import warnings from dataclasses import dataclass from typing import List, Optional, Tuple, Union import torch from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACT2FN from ...modeling_utils import PoolerAnswerClass, PoolerEndLogits, PoolerStartLogits, PreTrainedModel, SequenceSummary from ...pytorch_utils import apply_chunking_to_forward from ...utils import ( ModelOutput, add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_xlnet import XLNetConfig logger = logging.get_logger(__name__) _CHECKPOINT_FOR_DOC = "xlnet-base-cased" _CONFIG_FOR_DOC = "XLNetConfig" XLNET_PRETRAINED_MODEL_ARCHIVE_LIST = [ "xlnet-base-cased", "xlnet-large-cased", # See all XLNet models at https://huggingface.co/models?filter=xlnet ] def build_tf_xlnet_to_pytorch_map(model, config, tf_weights=None): """ A map of modules from TF to PyTorch. I use a map to keep the PyTorch model as identical to the original PyTorch model as possible. 
""" tf_to_pt_map = {} if hasattr(model, "transformer"): if hasattr(model, "lm_loss"): # We will load also the output bias tf_to_pt_map["model/lm_loss/bias"] = model.lm_loss.bias if hasattr(model, "sequence_summary") and "model/sequnece_summary/summary/kernel" in tf_weights: # We will load also the sequence summary tf_to_pt_map["model/sequnece_summary/summary/kernel"] = model.sequence_summary.summary.weight tf_to_pt_map["model/sequnece_summary/summary/bias"] = model.sequence_summary.summary.bias if ( hasattr(model, "logits_proj") and config.finetuning_task is not None and f"model/regression_{config.finetuning_task}/logit/kernel" in tf_weights ): tf_to_pt_map[f"model/regression_{config.finetuning_task}/logit/kernel"] = model.logits_proj.weight tf_to_pt_map[f"model/regression_{config.finetuning_task}/logit/bias"] = model.logits_proj.bias # Now load the rest of the transformer model = model.transformer # Embeddings and output tf_to_pt_map.update( { "model/transformer/word_embedding/lookup_table": model.word_embedding.weight, "model/transformer/mask_emb/mask_emb": model.mask_emb, } ) # Transformer blocks for i, b in enumerate(model.layer): layer_str = f"model/transformer/layer_{i}/" tf_to_pt_map.update( { layer_str + "rel_attn/LayerNorm/gamma": b.rel_attn.layer_norm.weight, layer_str + "rel_attn/LayerNorm/beta": b.rel_attn.layer_norm.bias, layer_str + "rel_attn/o/kernel": b.rel_attn.o, layer_str + "rel_attn/q/kernel": b.rel_attn.q, layer_str + "rel_attn/k/kernel": b.rel_attn.k, layer_str + "rel_attn/r/kernel": b.rel_attn.r, layer_str + "rel_attn/v/kernel": b.rel_attn.v, layer_str + "ff/LayerNorm/gamma": b.ff.layer_norm.weight, layer_str + "ff/LayerNorm/beta": b.ff.layer_norm.bias, layer_str + "ff/layer_1/kernel": b.ff.layer_1.weight, layer_str + "ff/layer_1/bias": b.ff.layer_1.bias, layer_str + "ff/layer_2/kernel": b.ff.layer_2.weight, layer_str + "ff/layer_2/bias": b.ff.layer_2.bias, } ) # Relative positioning biases if config.untie_r: r_r_list = [] r_w_list = [] 
r_s_list = [] seg_embed_list = [] for b in model.layer: r_r_list.append(b.rel_attn.r_r_bias) r_w_list.append(b.rel_attn.r_w_bias) r_s_list.append(b.rel_attn.r_s_bias) seg_embed_list.append(b.rel_attn.seg_embed) else: r_r_list = [model.r_r_bias] r_w_list = [model.r_w_bias] r_s_list = [model.r_s_bias] seg_embed_list = [model.seg_embed] tf_to_pt_map.update( { "model/transformer/r_r_bias": r_r_list, "model/transformer/r_w_bias": r_w_list, "model/transformer/r_s_bias": r_s_list, "model/transformer/seg_embed": seg_embed_list, } ) return tf_to_pt_map def load_tf_weights_in_xlnet(model, config, tf_path): """Load tf checkpoints in a pytorch model""" try: import numpy as np import tensorflow as tf except ImportError: logger.error( "Loading a TensorFlow models in PyTorch, requires TensorFlow to be installed. Please see " "https://www.tensorflow.org/install/ for installation instructions." ) raise # Load weights from TF model init_vars = tf.train.list_variables(tf_path) tf_weights = {} for name, shape in init_vars: logger.info(f"Loading TF weight {name} with shape {shape}") array = tf.train.load_variable(tf_path, name) tf_weights[name] = array # Build TF to PyTorch weights loading map tf_to_pt_map = build_tf_xlnet_to_pytorch_map(model, config, tf_weights) for name, pointer in tf_to_pt_map.items(): logger.info(f"Importing {name}") if name not in tf_weights: logger.info(f"{name} not in tf pre-trained weights, skipping") continue array = tf_weights[name] # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v # which are not required for using pretrained model if "kernel" in name and ("ff" in name or "summary" in name or "logit" in name): logger.info("Transposing") array = np.transpose(array) if isinstance(pointer, list): # Here we will split the TF weights assert ( len(pointer) == array.shape[0] ), f"Pointer length {len(pointer)} and array length {array.shape[0]} mismatched" for i, p_i in enumerate(pointer): arr_i = array[i, ...] 
try: assert ( p_i.shape == arr_i.shape ), f"Pointer shape {p_i.shape} and array shape {arr_i.shape} mismatched" except AssertionError as e: e.args += (p_i.shape, arr_i.shape) raise logger.info(f"Initialize PyTorch weight {name} for layer {i}") p_i.data = torch.from_numpy(arr_i) else: try: assert ( pointer.shape == array.shape ), f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched" except AssertionError as e: e.args += (pointer.shape, array.shape) raise logger.info(f"Initialize PyTorch weight {name}") pointer.data = torch.from_numpy(array) tf_weights.pop(name, None) tf_weights.pop(name + "/Adam", None) tf_weights.pop(name + "/Adam_1", None) logger.info(f"Weights not copied to PyTorch model: {', '.join(tf_weights.keys())}") return model class XLNetRelativeAttention(nn.Module): def __init__(self, config): super().__init__() if config.d_model % config.n_head != 0: raise ValueError( f"The hidden size ({config.d_model}) is not a multiple of the number of attention " f"heads ({config.n_head}" ) self.n_head = config.n_head self.d_head = config.d_head self.d_model = config.d_model self.scale = 1 / (config.d_head**0.5) self.q = nn.Parameter(torch.FloatTensor(config.d_model, self.n_head, self.d_head)) self.k = nn.Parameter(torch.FloatTensor(config.d_model, self.n_head, self.d_head)) self.v = nn.Parameter(torch.FloatTensor(config.d_model, self.n_head, self.d_head)) self.o = nn.Parameter(torch.FloatTensor(config.d_model, self.n_head, self.d_head)) self.r = nn.Parameter(torch.FloatTensor(config.d_model, self.n_head, self.d_head)) self.r_r_bias = nn.Parameter(torch.FloatTensor(self.n_head, self.d_head)) self.r_s_bias = nn.Parameter(torch.FloatTensor(self.n_head, self.d_head)) self.r_w_bias = nn.Parameter(torch.FloatTensor(self.n_head, self.d_head)) self.seg_embed = nn.Parameter(torch.FloatTensor(2, self.n_head, self.d_head)) self.layer_norm = nn.LayerNorm(config.d_model, eps=config.layer_norm_eps) self.dropout = nn.Dropout(config.dropout) def 
prune_heads(self, heads): raise NotImplementedError @staticmethod def rel_shift(x, klen=-1): """perform relative shift to form the relative attention score.""" x_size = x.shape x = x.reshape(x_size[1], x_size[0], x_size[2], x_size[3]) x = x[1:, ...] x = x.reshape(x_size[0], x_size[1] - 1, x_size[2], x_size[3]) # x = x[:, 0:klen, :, :] x = torch.index_select(x, 1, torch.arange(klen, device=x.device, dtype=torch.long)) return x @staticmethod def rel_shift_bnij(x, klen=-1): x_size = x.shape x = x.reshape(x_size[0], x_size[1], x_size[3], x_size[2]) x = x[:, :, 1:, :] x = x.reshape(x_size[0], x_size[1], x_size[2], x_size[3] - 1) # Note: the tensor-slice form was faster in my testing than torch.index_select # However, tracing doesn't like the nature of the slice, and if klen changes # during the run then it'll fail, whereas index_select will be fine. x = torch.index_select(x, 3, torch.arange(klen, device=x.device, dtype=torch.long)) # x = x[:, :, :, :klen] return x def rel_attn_core( self, q_head, k_head_h, v_head_h, k_head_r, seg_mat=None, attn_mask=None, head_mask=None, output_attentions=False, ): """Core relative positional attention operations.""" # content based attention score ac = torch.einsum("ibnd,jbnd->bnij", q_head + self.r_w_bias, k_head_h) # position based attention score bd = torch.einsum("ibnd,jbnd->bnij", q_head + self.r_r_bias, k_head_r) bd = self.rel_shift_bnij(bd, klen=ac.shape[3]) # segment based attention score if seg_mat is None: ef = 0 else: ef = torch.einsum("ibnd,snd->ibns", q_head + self.r_s_bias, self.seg_embed) ef = torch.einsum("ijbs,ibns->bnij", seg_mat, ef) # merge attention scores and perform masking attn_score = (ac + bd + ef) * self.scale if attn_mask is not None: # attn_score = attn_score * (1 - attn_mask) - 1e30 * attn_mask if attn_mask.dtype == torch.float16: attn_score = attn_score - 65500 * torch.einsum("ijbn->bnij", attn_mask) else: attn_score = attn_score - 1e30 * torch.einsum("ijbn->bnij", attn_mask) # attention probability 
attn_prob = nn.functional.softmax(attn_score, dim=3) attn_prob = self.dropout(attn_prob) # Mask heads if we want to if head_mask is not None: attn_prob = attn_prob * torch.einsum("ijbn->bnij", head_mask) # attention output attn_vec = torch.einsum("bnij,jbnd->ibnd", attn_prob, v_head_h) if output_attentions: return attn_vec, torch.einsum("bnij->ijbn", attn_prob) return attn_vec def post_attention(self, h, attn_vec, residual=True): """Post-attention processing.""" # post-attention projection (back to `d_model`) attn_out = torch.einsum("ibnd,hnd->ibh", attn_vec, self.o) attn_out = self.dropout(attn_out) if residual: attn_out = attn_out + h output = self.layer_norm(attn_out) return output def forward( self, h, g, attn_mask_h, attn_mask_g, r, seg_mat, mems=None, target_mapping=None, head_mask=None, output_attentions=False, ): if g is not None: # Two-stream attention with relative positional encoding. # content based attention score if mems is not None and mems.dim() > 1: cat = torch.cat([mems, h], dim=0) else: cat = h # content-based key head k_head_h = torch.einsum("ibh,hnd->ibnd", cat, self.k) # content-based value head v_head_h = torch.einsum("ibh,hnd->ibnd", cat, self.v) # position-based key head k_head_r = torch.einsum("ibh,hnd->ibnd", r, self.r) # h-stream # content-stream query head q_head_h = torch.einsum("ibh,hnd->ibnd", h, self.q) # core attention ops attn_vec_h = self.rel_attn_core( q_head_h, k_head_h, v_head_h, k_head_r, seg_mat=seg_mat, attn_mask=attn_mask_h, head_mask=head_mask, output_attentions=output_attentions, ) if output_attentions: attn_vec_h, attn_prob_h = attn_vec_h # post processing output_h = self.post_attention(h, attn_vec_h) # g-stream # query-stream query head q_head_g = torch.einsum("ibh,hnd->ibnd", g, self.q) # core attention ops if target_mapping is not None: q_head_g = torch.einsum("mbnd,mlb->lbnd", q_head_g, target_mapping) attn_vec_g = self.rel_attn_core( q_head_g, k_head_h, v_head_h, k_head_r, seg_mat=seg_mat, attn_mask=attn_mask_g, 
head_mask=head_mask, output_attentions=output_attentions, ) if output_attentions: attn_vec_g, attn_prob_g = attn_vec_g attn_vec_g = torch.einsum("lbnd,mlb->mbnd", attn_vec_g, target_mapping) else: attn_vec_g = self.rel_attn_core( q_head_g, k_head_h, v_head_h, k_head_r, seg_mat=seg_mat, attn_mask=attn_mask_g, head_mask=head_mask, output_attentions=output_attentions, ) if output_attentions: attn_vec_g, attn_prob_g = attn_vec_g # post processing output_g = self.post_attention(g, attn_vec_g) if output_attentions: attn_prob = attn_prob_h, attn_prob_g else: # Multi-head attention with relative positional encoding if mems is not None and mems.dim() > 1: cat = torch.cat([mems, h], dim=0) else: cat = h # content heads q_head_h = torch.einsum("ibh,hnd->ibnd", h, self.q) k_head_h = torch.einsum("ibh,hnd->ibnd", cat, self.k) v_head_h = torch.einsum("ibh,hnd->ibnd", cat, self.v) # positional heads # type casting for fp16 support k_head_r = torch.einsum("ibh,hnd->ibnd", r.type(self.r.dtype), self.r) # core attention ops attn_vec = self.rel_attn_core( q_head_h, k_head_h, v_head_h, k_head_r, seg_mat=seg_mat, attn_mask=attn_mask_h, head_mask=head_mask, output_attentions=output_attentions, ) if output_attentions: attn_vec, attn_prob = attn_vec # post processing output_h = self.post_attention(h, attn_vec) output_g = None outputs = (output_h, output_g) if output_attentions: outputs = outputs + (attn_prob,) return outputs class XLNetFeedForward(nn.Module): def __init__(self, config): super().__init__() self.layer_norm = nn.LayerNorm(config.d_model, eps=config.layer_norm_eps) self.layer_1 = nn.Linear(config.d_model, config.d_inner) self.layer_2 = nn.Linear(config.d_inner, config.d_model) self.dropout = nn.Dropout(config.dropout) if isinstance(config.ff_activation, str): self.activation_function = ACT2FN[config.ff_activation] else: self.activation_function = config.ff_activation def forward(self, inp): output = inp output = self.layer_1(output) output = self.activation_function(output) 
output = self.dropout(output) output = self.layer_2(output) output = self.dropout(output) output = self.layer_norm(output + inp) return output class XLNetLayer(nn.Module): def __init__(self, config): super().__init__() self.rel_attn = XLNetRelativeAttention(config) self.ff = XLNetFeedForward(config) self.dropout = nn.Dropout(config.dropout) self.chunk_size_feed_forward = config.chunk_size_feed_forward self.seq_len_dim = 1 def forward( self, output_h, output_g, attn_mask_h, attn_mask_g, r, seg_mat, mems=None, target_mapping=None, head_mask=None, output_attentions=False, ): outputs = self.rel_attn( output_h, output_g, attn_mask_h, attn_mask_g, r, seg_mat, mems=mems, target_mapping=target_mapping, head_mask=head_mask, output_attentions=output_attentions, ) output_h, output_g = outputs[:2] if output_g is not None: output_g = apply_chunking_to_forward( self.ff_chunk, self.chunk_size_feed_forward, self.seq_len_dim, output_g ) output_h = apply_chunking_to_forward(self.ff_chunk, self.chunk_size_feed_forward, self.seq_len_dim, output_h) outputs = (output_h, output_g) + outputs[2:] # Add again attentions if there are there return outputs def ff_chunk(self, output_x): output_x = self.ff(output_x) return output_x class XLNetPreTrainedModel(PreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. 
""" config_class = XLNetConfig load_tf_weights = load_tf_weights_in_xlnet base_model_prefix = "transformer" def _init_weights(self, module): """Initialize the weights.""" if isinstance(module, nn.Linear): # Slightly different from the TF version which uses truncated_normal for initialization # cf https://github.com/pytorch/pytorch/pull/5617 module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.bias is not None: module.bias.data.zero_() elif isinstance(module, nn.Embedding): module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.padding_idx is not None: module.weight.data[module.padding_idx].zero_() elif isinstance(module, nn.LayerNorm): module.bias.data.zero_() module.weight.data.fill_(1.0) elif isinstance(module, XLNetRelativeAttention): for param in [ module.q, module.k, module.v, module.o, module.r, module.r_r_bias, module.r_s_bias, module.r_w_bias, module.seg_embed, ]: param.data.normal_(mean=0.0, std=self.config.initializer_range) elif isinstance(module, XLNetModel): module.mask_emb.data.normal_(mean=0.0, std=self.config.initializer_range) @dataclass class XLNetModelOutput(ModelOutput): """ Output type of [`XLNetModel`]. Args: last_hidden_state (`torch.FloatTensor` of shape `(batch_size, num_predict, hidden_size)`): Sequence of hidden-states at the last layer of the model. `num_predict` corresponds to `target_mapping.shape[1]`. If `target_mapping` is `None`, then `num_predict` corresponds to `sequence_length`. mems (`List[torch.FloatTensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. 
hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ last_hidden_state: torch.FloatTensor mems: Optional[List[torch.FloatTensor]] = None hidden_states: Optional[Tuple[torch.FloatTensor]] = None attentions: Optional[Tuple[torch.FloatTensor]] = None @dataclass class XLNetLMHeadModelOutput(ModelOutput): """ Output type of [`XLNetLMHeadModel`]. Args: loss (`torch.FloatTensor` of shape *(1,)*, *optional*, returned when `labels` is provided) Language modeling loss (for next-token prediction). logits (`torch.FloatTensor` of shape `(batch_size, num_predict, config.vocab_size)`): Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax). `num_predict` corresponds to `target_mapping.shape[1]`. If `target_mapping` is `None`, then `num_predict` corresponds to `sequence_length`. mems (`List[torch.FloatTensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. 
hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[torch.FloatTensor] = None logits: torch.FloatTensor = None mems: Optional[List[torch.FloatTensor]] = None hidden_states: Optional[Tuple[torch.FloatTensor]] = None attentions: Optional[Tuple[torch.FloatTensor]] = None @dataclass class XLNetForSequenceClassificationOutput(ModelOutput): """ Output type of [`XLNetForSequenceClassification`]. Args: loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `label` is provided): Classification (or regression if config.num_labels==1) loss. logits (`torch.FloatTensor` of shape `(batch_size, config.num_labels)`): Classification (or regression if config.num_labels==1) scores (before SoftMax). mems (`List[torch.FloatTensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. 
hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[torch.FloatTensor] = None logits: torch.FloatTensor = None mems: Optional[List[torch.FloatTensor]] = None hidden_states: Optional[Tuple[torch.FloatTensor]] = None attentions: Optional[Tuple[torch.FloatTensor]] = None @dataclass class XLNetForTokenClassificationOutput(ModelOutput): """ Output type of [`XLNetForTokenClassificationOutput`]. Args: loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` is provided) : Classification loss. logits (`torch.FloatTensor` of shape `(batch_size, sequence_length, config.num_labels)`): Classification scores (before SoftMax). mems (`List[torch.FloatTensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. 
Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[torch.FloatTensor] = None logits: torch.FloatTensor = None mems: Optional[List[torch.FloatTensor]] = None hidden_states: Optional[Tuple[torch.FloatTensor]] = None attentions: Optional[Tuple[torch.FloatTensor]] = None @dataclass class XLNetForMultipleChoiceOutput(ModelOutput): """ Output type of [`XLNetForMultipleChoice`]. Args: loss (`torch.FloatTensor` of shape *(1,)*, *optional*, returned when `labels` is provided): Classification loss. logits (`torch.FloatTensor` of shape `(batch_size, num_choices)`): *num_choices* is the second dimension of the input tensors. (see *input_ids* above). Classification scores (before SoftMax). mems (`List[torch.FloatTensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. 
attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[torch.FloatTensor] = None logits: torch.FloatTensor = None mems: Optional[List[torch.FloatTensor]] = None hidden_states: Optional[Tuple[torch.FloatTensor]] = None attentions: Optional[Tuple[torch.FloatTensor]] = None @dataclass class XLNetForQuestionAnsweringSimpleOutput(ModelOutput): """ Output type of [`XLNetForQuestionAnsweringSimple`]. Args: loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` is provided): Total span extraction loss is the sum of a Cross-Entropy for the start and end positions. start_logits (`torch.FloatTensor` of shape `(batch_size, sequence_length,)`): Span-start scores (before SoftMax). end_logits (`torch.FloatTensor` of shape `(batch_size, sequence_length,)`): Span-end scores (before SoftMax). mems (`List[torch.FloatTensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. 
attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[torch.FloatTensor] = None start_logits: torch.FloatTensor = None end_logits: torch.FloatTensor = None mems: Optional[List[torch.FloatTensor]] = None hidden_states: Optional[Tuple[torch.FloatTensor]] = None attentions: Optional[Tuple[torch.FloatTensor]] = None @dataclass class XLNetForQuestionAnsweringOutput(ModelOutput): """ Output type of [`XLNetForQuestionAnswering`]. Args: loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned if both `start_positions` and `end_positions` are provided): Classification loss as the sum of start token, end token (and is_impossible if provided) classification losses. start_top_log_probs (`torch.FloatTensor` of shape `(batch_size, config.start_n_top)`, *optional*, returned if `start_positions` or `end_positions` is not provided): Log probabilities for the top config.start_n_top start token possibilities (beam-search). start_top_index (`torch.LongTensor` of shape `(batch_size, config.start_n_top)`, *optional*, returned if `start_positions` or `end_positions` is not provided): Indices for the top config.start_n_top start token possibilities (beam-search). end_top_log_probs (`torch.FloatTensor` of shape `(batch_size, config.start_n_top * config.end_n_top)`, *optional*, returned if `start_positions` or `end_positions` is not provided): Log probabilities for the top `config.start_n_top * config.end_n_top` end token possibilities (beam-search). 
end_top_index (`torch.LongTensor` of shape `(batch_size, config.start_n_top * config.end_n_top)`, *optional*, returned if `start_positions` or `end_positions` is not provided): Indices for the top `config.start_n_top * config.end_n_top` end token possibilities (beam-search). cls_logits (`torch.FloatTensor` of shape `(batch_size,)`, *optional*, returned if `start_positions` or `end_positions` is not provided): Log probabilities for the `is_impossible` label of the answers. mems (`List[torch.FloatTensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. 
""" loss: Optional[torch.FloatTensor] = None start_top_log_probs: Optional[torch.FloatTensor] = None start_top_index: Optional[torch.LongTensor] = None end_top_log_probs: Optional[torch.FloatTensor] = None end_top_index: Optional[torch.LongTensor] = None cls_logits: Optional[torch.FloatTensor] = None mems: Optional[List[torch.FloatTensor]] = None hidden_states: Optional[Tuple[torch.FloatTensor]] = None attentions: Optional[Tuple[torch.FloatTensor]] = None XLNET_START_DOCSTRING = r""" This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`XLNetConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ XLNET_INPUTS_DOCSTRING = r""" Args: input_ids (`torch.LongTensor` of shape `({0})`): Indices of input sequence tokens in the vocabulary. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) mems (`List[torch.FloatTensor]` of length `config.n_layers`): Contains pre-computed hidden-states (see `mems` output below) . 
Can be used to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. `use_mems` has to be set to `True` to make use of `mems`. perm_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length, sequence_length)`, *optional*): Mask to indicate the attention pattern for each input token with values selected in `[0, 1]`: - if `perm_mask[k, i, j] = 0`, i attend to j in batch k; - if `perm_mask[k, i, j] = 1`, i does not attend to j in batch k. If not set, each token attends to all the others (full bidirectional attention). Only used during pretraining (to define factorization order) or for sequential decoding (generation). target_mapping (`torch.FloatTensor` of shape `(batch_size, num_predict, sequence_length)`, *optional*): Mask to indicate the output tokens to use. If `target_mapping[k, i, j] = 1`, the i-th predict in batch k is on the j-th token. Only used during pretraining for partial prediction or for sequential decoding (generation). token_type_ids (`torch.LongTensor` of shape `({0})`, *optional*): Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0, 1]`: - 0 corresponds to a *sentence A* token, - 1 corresponds to a *sentence B* token. [What are token type IDs?](../glossary#token-type-ids) input_mask (`torch.FloatTensor` of shape `{0}`, *optional*): Mask to avoid performing attention on padding token indices. Negative of `attention_mask`, i.e. with 0 for real tokens and 1 for padding which is kept for compatibility with the original code base. Mask values selected in `[0, 1]`: - 1 for tokens that are **masked**, - 0 for tokens that are **not masked**. You can only uses one of `input_mask` and `attention_mask`. head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules. 
Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ @add_start_docstrings( "The bare XLNet Model transformer outputting raw hidden-states without any specific head on top.", XLNET_START_DOCSTRING, ) class XLNetModel(XLNetPreTrainedModel): def __init__(self, config): super().__init__(config) self.mem_len = config.mem_len self.reuse_len = config.reuse_len self.d_model = config.d_model self.same_length = config.same_length self.attn_type = config.attn_type self.bi_data = config.bi_data self.clamp_len = config.clamp_len self.n_layer = config.n_layer self.word_embedding = nn.Embedding(config.vocab_size, config.d_model) self.mask_emb = nn.Parameter(torch.FloatTensor(1, 1, config.d_model)) self.layer = nn.ModuleList([XLNetLayer(config) for _ in range(config.n_layer)]) self.dropout = nn.Dropout(config.dropout) # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.word_embedding def set_input_embeddings(self, new_embeddings): self.word_embedding = new_embeddings def _prune_heads(self, heads_to_prune): raise NotImplementedError def create_mask(self, qlen, mlen): """ Creates causal attention 
mask. Float mask where 1.0 indicates masked, 0.0 indicates not-masked. Args: qlen: Sequence length mlen: Mask length :: same_length=False: same_length=True: <mlen > < qlen > <mlen > < qlen > ^ [0 0 0 0 0 1 1 1 1] [0 0 0 0 0 1 1 1 1] [0 0 0 0 0 0 1 1 1] [1 0 0 0 0 0 1 1 1] qlen [0 0 0 0 0 0 0 1 1] [1 1 0 0 0 0 0 1 1] [0 0 0 0 0 0 0 0 1] [1 1 1 0 0 0 0 0 1] v [0 0 0 0 0 0 0 0 0] [1 1 1 1 0 0 0 0 0] """ attn_mask = torch.ones([qlen, qlen]) mask_up = torch.triu(attn_mask, diagonal=1) attn_mask_pad = torch.zeros([qlen, mlen]) ret = torch.cat([attn_mask_pad, mask_up], dim=1) if self.same_length: mask_lo = torch.tril(attn_mask, diagonal=-1) ret = torch.cat([ret[:, :qlen] + mask_lo, ret[:, qlen:]], dim=1) ret = ret.to(self.device) return ret def cache_mem(self, curr_out, prev_mem): # cache hidden states into memory. if self.reuse_len is not None and self.reuse_len > 0: curr_out = curr_out[: self.reuse_len] if self.mem_len is None or self.mem_len == 0: # If `use_mems` is active but no `mem_len` is defined, the model behaves like GPT-2 at inference time # and returns all of the past and current hidden states. cutoff = 0 else: # If `use_mems` is active and `mem_len` is defined, the model returns the last `mem_len` hidden # states. This is the preferred setting for training and long-form generation. cutoff = -self.mem_len if prev_mem is None: # if `use_mems` is active and `mem_len` is defined, the model new_mem = curr_out[cutoff:] else: new_mem = torch.cat([prev_mem, curr_out], dim=0)[cutoff:] return new_mem.detach() @staticmethod def positional_embedding(pos_seq, inv_freq, bsz=None): sinusoid_inp = torch.einsum("i,d->id", pos_seq, inv_freq) pos_emb = torch.cat([torch.sin(sinusoid_inp), torch.cos(sinusoid_inp)], dim=-1) pos_emb = pos_emb[:, None, :] if bsz is not None: pos_emb = pos_emb.expand(-1, bsz, -1) return pos_emb def relative_positional_encoding(self, qlen, klen, bsz=None): # create relative positional encoding. 
freq_seq = torch.arange(0, self.d_model, 2.0, dtype=torch.float) inv_freq = 1 / torch.pow(10000, (freq_seq / self.d_model)) if self.attn_type == "bi": # beg, end = klen - 1, -qlen beg, end = klen, -qlen elif self.attn_type == "uni": # beg, end = klen - 1, -1 beg, end = klen, -1 else: raise ValueError(f"Unknown `attn_type` {self.attn_type}.") if self.bi_data: fwd_pos_seq = torch.arange(beg, end, -1.0, dtype=torch.float) bwd_pos_seq = torch.arange(-beg, -end, 1.0, dtype=torch.float) if self.clamp_len > 0: fwd_pos_seq = fwd_pos_seq.clamp(-self.clamp_len, self.clamp_len) bwd_pos_seq = bwd_pos_seq.clamp(-self.clamp_len, self.clamp_len) if bsz is not None: fwd_pos_emb = self.positional_embedding(fwd_pos_seq, inv_freq, bsz // 2) bwd_pos_emb = self.positional_embedding(bwd_pos_seq, inv_freq, bsz // 2) else: fwd_pos_emb = self.positional_embedding(fwd_pos_seq, inv_freq) bwd_pos_emb = self.positional_embedding(bwd_pos_seq, inv_freq) pos_emb = torch.cat([fwd_pos_emb, bwd_pos_emb], dim=1) else: fwd_pos_seq = torch.arange(beg, end, -1.0) if self.clamp_len > 0: fwd_pos_seq = fwd_pos_seq.clamp(-self.clamp_len, self.clamp_len) pos_emb = self.positional_embedding(fwd_pos_seq, inv_freq, bsz) return pos_emb @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=XLNetModelOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.Tensor] = None, attention_mask: Optional[torch.Tensor] = None, mems: Optional[torch.Tensor] = None, perm_mask: Optional[torch.Tensor] = None, target_mapping: Optional[torch.Tensor] = None, token_type_ids: Optional[torch.Tensor] = None, input_mask: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, inputs_embeds: Optional[torch.Tensor] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, 
**kwargs, # delete after depreciation warning is removed ) -> Union[Tuple, XLNetModelOutput]: output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict if "use_cache" in kwargs: warnings.warn( "The `use_cache` argument is deprecated and will be removed in a future version, use `use_mems`" " instead.", FutureWarning, ) use_mems = kwargs["use_cache"] if self.training: use_mems = use_mems if use_mems is not None else self.config.use_mems_train else: use_mems = use_mems if use_mems is not None else self.config.use_mems_eval # the original code for XLNet uses shapes [len, bsz] with the batch dimension at the end # but we want a unified interface in the library with the batch size on the first dimension # so we move here the first dimension (batch) to the end if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") elif input_ids is not None: input_ids = input_ids.transpose(0, 1).contiguous() qlen, bsz = input_ids.shape[0], input_ids.shape[1] elif inputs_embeds is not None: inputs_embeds = inputs_embeds.transpose(0, 1).contiguous() qlen, bsz = inputs_embeds.shape[0], inputs_embeds.shape[1] else: raise ValueError("You have to specify either input_ids or inputs_embeds") token_type_ids = token_type_ids.transpose(0, 1).contiguous() if token_type_ids is not None else None input_mask = input_mask.transpose(0, 1).contiguous() if input_mask is not None else None attention_mask = attention_mask.transpose(0, 1).contiguous() if attention_mask is not None else None perm_mask = perm_mask.permute(1, 2, 0).contiguous() if perm_mask is not None else None target_mapping = target_mapping.permute(1, 2, 0).contiguous() if target_mapping is not None else 
None mlen = mems[0].shape[0] if mems is not None and mems[0] is not None else 0 klen = mlen + qlen dtype_float = self.dtype device = self.device # Attention mask # causal attention mask if self.attn_type == "uni": attn_mask = self.create_mask(qlen, mlen) attn_mask = attn_mask[:, :, None, None] elif self.attn_type == "bi": attn_mask = None else: raise ValueError(f"Unsupported attention type: {self.attn_type}") # data mask: input mask & perm mask assert input_mask is None or attention_mask is None, "You can only use one of input_mask (uses 1 for padding) " "or attention_mask (uses 0 for padding, added for compatibility with BERT). Please choose one." if input_mask is None and attention_mask is not None: input_mask = 1.0 - attention_mask if input_mask is not None and perm_mask is not None: data_mask = input_mask[None] + perm_mask elif input_mask is not None and perm_mask is None: data_mask = input_mask[None] elif input_mask is None and perm_mask is not None: data_mask = perm_mask else: data_mask = None if data_mask is not None: # all mems can be attended to if mlen > 0: mems_mask = torch.zeros([data_mask.shape[0], mlen, bsz]).to(data_mask) data_mask = torch.cat([mems_mask, data_mask], dim=1) if attn_mask is None: attn_mask = data_mask[:, :, :, None] else: attn_mask += data_mask[:, :, :, None] if attn_mask is not None: attn_mask = (attn_mask > 0).to(dtype_float) if attn_mask is not None: non_tgt_mask = -torch.eye(qlen).to(attn_mask) if mlen > 0: non_tgt_mask = torch.cat([torch.zeros([qlen, mlen]).to(attn_mask), non_tgt_mask], dim=-1) non_tgt_mask = ((attn_mask + non_tgt_mask[:, :, None, None]) > 0).to(attn_mask) else: non_tgt_mask = None # Word embeddings and prepare h & g hidden states if inputs_embeds is not None: word_emb_k = inputs_embeds else: word_emb_k = self.word_embedding(input_ids) output_h = self.dropout(word_emb_k) if target_mapping is not None: word_emb_q = self.mask_emb.expand(target_mapping.shape[0], bsz, -1) # else: # We removed the inp_q input which 
was same as target mapping # inp_q_ext = inp_q[:, :, None] # word_emb_q = inp_q_ext * self.mask_emb + (1 - inp_q_ext) * word_emb_k output_g = self.dropout(word_emb_q) else: output_g = None # Segment embedding if token_type_ids is not None: # Convert `token_type_ids` to one-hot `seg_mat` if mlen > 0: mem_pad = torch.zeros([mlen, bsz], dtype=torch.long, device=device) cat_ids = torch.cat([mem_pad, token_type_ids], dim=0) else: cat_ids = token_type_ids # `1` indicates not in the same segment [qlen x klen x bsz] seg_mat = (token_type_ids[:, None] != cat_ids[None, :]).long() seg_mat = nn.functional.one_hot(seg_mat, num_classes=2).to(dtype_float) else: seg_mat = None # Positional encoding pos_emb = self.relative_positional_encoding(qlen, klen, bsz=bsz) pos_emb = pos_emb.to(output_h.device) pos_emb = self.dropout(pos_emb) # Prepare head mask if needed # 1.0 in head_mask indicate we keep the head # attention_probs has shape bsz x n_heads x N x N # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] (a head_mask for each layer) # and head_mask is converted to shape [num_hidden_layers x qlen x klen x bsz x n_head] if head_mask is not None: if head_mask.dim() == 1: head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(0).unsqueeze(0) head_mask = head_mask.expand(self.n_layer, -1, -1, -1, -1) elif head_mask.dim() == 2: head_mask = head_mask.unsqueeze(1).unsqueeze(1).unsqueeze(1) head_mask = head_mask.to( dtype=next(self.parameters()).dtype ) # switch to float if need + fp16 compatibility else: head_mask = [None] * self.n_layer new_mems = () if mems is None: mems = [None] * len(self.layer) attentions = [] if output_attentions else None hidden_states = [] if output_hidden_states else None for i, layer_module in enumerate(self.layer): if use_mems: # cache new mems new_mems = new_mems + (self.cache_mem(output_h, mems[i]),) if output_hidden_states: hidden_states.append((output_h, output_g) if output_g is not None else output_h) outputs = layer_module( 
output_h, output_g, attn_mask_h=non_tgt_mask, attn_mask_g=attn_mask, r=pos_emb, seg_mat=seg_mat, mems=mems[i], target_mapping=target_mapping, head_mask=head_mask[i], output_attentions=output_attentions, ) output_h, output_g = outputs[:2] if output_attentions: attentions.append(outputs[2]) # Add last hidden state if output_hidden_states: hidden_states.append((output_h, output_g) if output_g is not None else output_h) output = self.dropout(output_g if output_g is not None else output_h) # Prepare outputs, we transpose back here to shape [bsz, len, hidden_dim] (cf. beginning of forward() method) output = output.permute(1, 0, 2).contiguous() if not use_mems: new_mems = None if output_hidden_states: if output_g is not None: hidden_states = tuple(h.permute(1, 0, 2).contiguous() for hs in hidden_states for h in hs) else: hidden_states = tuple(hs.permute(1, 0, 2).contiguous() for hs in hidden_states) if output_attentions: if target_mapping is not None: # when target_mapping is provided, there are 2-tuple of attentions attentions = tuple( tuple(att_stream.permute(2, 3, 0, 1).contiguous() for att_stream in t) for t in attentions ) else: attentions = tuple(t.permute(2, 3, 0, 1).contiguous() for t in attentions) if not return_dict: return tuple(v for v in [output, new_mems, hidden_states, attentions] if v is not None) return XLNetModelOutput( last_hidden_state=output, mems=new_mems, hidden_states=hidden_states, attentions=attentions ) @add_start_docstrings( """ XLNet Model with a language modeling head on top (linear layer with weights tied to the input embeddings). 
""", XLNET_START_DOCSTRING, ) class XLNetLMHeadModel(XLNetPreTrainedModel): _keys_to_ignore_on_load_missing = [r"lm_loss.weight"] def __init__(self, config): super().__init__(config) self.attn_type = config.attn_type self.same_length = config.same_length self.transformer = XLNetModel(config) self.lm_loss = nn.Linear(config.d_model, config.vocab_size, bias=True) # Initialize weights and apply final processing self.post_init() def get_output_embeddings(self): return self.lm_loss def set_output_embeddings(self, new_embeddings): self.lm_loss = new_embeddings def prepare_inputs_for_generation(self, input_ids, past_key_values=None, use_mems=None, **kwargs): # Add dummy token at the end (no attention on this one) effective_batch_size = input_ids.shape[0] dummy_token = torch.zeros((effective_batch_size, 1), dtype=torch.long, device=input_ids.device) # At every pass, the attention values for the new token and the two last generated tokens # are computed, the rest is reloaded from the `past` cache. A purely auto-regressive model would have # offset = 1; offset = 2 seems to have slightly better computation. 
offset = 2 if past_key_values: input_ids = torch.cat([input_ids[:, -offset:], dummy_token], dim=1) else: input_ids = torch.cat([input_ids, dummy_token], dim=1) # Build permutation mask so that previous tokens don't see last token sequence_length = input_ids.shape[1] perm_mask = torch.zeros( (effective_batch_size, sequence_length, sequence_length), dtype=torch.float, device=input_ids.device ) perm_mask[:, :, -1] = 1.0 # We'll only predict the last token target_mapping = torch.zeros( (effective_batch_size, 1, sequence_length), dtype=torch.float, device=input_ids.device ) target_mapping[:, 0, -1] = 1.0 inputs = { "input_ids": input_ids, "perm_mask": perm_mask, "target_mapping": target_mapping, "use_mems": use_mems, } # if past is defined in model kwargs then use it for faster decoding if past_key_values: inputs["mems"] = tuple(layer_past[:-offset, :, :] for layer_past in past_key_values) return inputs @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @replace_return_docstrings(output_type=XLNetLMHeadModelOutput, config_class=_CONFIG_FOR_DOC) def forward( self, input_ids: Optional[torch.Tensor] = None, attention_mask: Optional[torch.Tensor] = None, mems: Optional[torch.Tensor] = None, perm_mask: Optional[torch.Tensor] = None, target_mapping: Optional[torch.Tensor] = None, token_type_ids: Optional[torch.Tensor] = None, input_mask: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, inputs_embeds: Optional[torch.Tensor] = None, labels: Optional[torch.Tensor] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, **kwargs, # delete when `use_cache` is removed in XLNetModel ) -> Union[Tuple, XLNetLMHeadModelOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size, num_predict)`, *optional*): Labels for masked language modeling. `num_predict` corresponds to `target_mapping.shape[1]`. 
If `target_mapping` is `None`, then `num_predict` corresponds to `sequence_length`. The labels should correspond to the masked input words that should be predicted and depends on `target_mapping`. Note in order to perform standard auto-regressive language modeling a *<mask>* token has to be added to the `input_ids` (see the `prepare_inputs_for_generation` function and examples below) Indices are selected in `[-100, 0, ..., config.vocab_size]` All labels set to `-100` are ignored, the loss is only computed for labels in `[0, ..., config.vocab_size]` Return: Examples: ```python >>> from transformers import AutoTokenizer, XLNetLMHeadModel >>> import torch >>> tokenizer = AutoTokenizer.from_pretrained("xlnet-large-cased") >>> model = XLNetLMHeadModel.from_pretrained("xlnet-large-cased") >>> # We show how to setup inputs to predict a next token using a bi-directional context. >>> input_ids = torch.tensor( ... tokenizer.encode("Hello, my dog is very <mask>", add_special_tokens=False) ... ).unsqueeze( ... 0 ... ) # We will predict the masked token >>> perm_mask = torch.zeros((1, input_ids.shape[1], input_ids.shape[1]), dtype=torch.float) >>> perm_mask[:, :, -1] = 1.0 # Previous tokens don't see last token >>> target_mapping = torch.zeros( ... (1, 1, input_ids.shape[1]), dtype=torch.float ... ) # Shape [1, 1, seq_length] => let's predict one token >>> target_mapping[ ... 0, 0, -1 ... ] = 1.0 # Our first (and only) prediction will be the last token of the sequence (the masked token) >>> outputs = model(input_ids, perm_mask=perm_mask, target_mapping=target_mapping) >>> next_token_logits = outputs[ ... 0 ... ] # Output has shape [target_mapping.size(0), target_mapping.size(1), config.vocab_size] >>> # The same way can the XLNetLMHeadModel be used to be trained by standard auto-regressive language modeling. >>> input_ids = torch.tensor( ... tokenizer.encode("Hello, my dog is very <mask>", add_special_tokens=False) ... ).unsqueeze( ... 0 ... 
) # We will predict the masked token >>> labels = torch.tensor(tokenizer.encode("cute", add_special_tokens=False)).unsqueeze(0) >>> assert labels.shape[0] == 1, "only one word will be predicted" >>> perm_mask = torch.zeros((1, input_ids.shape[1], input_ids.shape[1]), dtype=torch.float) >>> perm_mask[ ... :, :, -1 ... ] = 1.0 # Previous tokens don't see last token as is done in standard auto-regressive lm training >>> target_mapping = torch.zeros( ... (1, 1, input_ids.shape[1]), dtype=torch.float ... ) # Shape [1, 1, seq_length] => let's predict one token >>> target_mapping[ ... 0, 0, -1 ... ] = 1.0 # Our first (and only) prediction will be the last token of the sequence (the masked token) >>> outputs = model(input_ids, perm_mask=perm_mask, target_mapping=target_mapping, labels=labels) >>> loss = outputs.loss >>> next_token_logits = ( ... outputs.logits ... ) # Logits have shape [target_mapping.size(0), target_mapping.size(1), config.vocab_size] ```""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict transformer_outputs = self.transformer( input_ids, attention_mask=attention_mask, mems=mems, perm_mask=perm_mask, target_mapping=target_mapping, token_type_ids=token_type_ids, input_mask=input_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, use_mems=use_mems, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, **kwargs, ) logits = self.lm_loss(transformer_outputs[0]) loss = None if labels is not None: # Flatten the tokens loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, logits.size(-1)), labels.view(-1)) if not return_dict: output = (logits,) + transformer_outputs[1:] return ((loss,) + output) if loss is not None else output return XLNetLMHeadModelOutput( loss=loss, logits=logits, mems=transformer_outputs.mems, hidden_states=transformer_outputs.hidden_states, attentions=transformer_outputs.attentions, ) @staticmethod def _reorder_cache(mems: List[torch.Tensor], 
beam_idx: torch.Tensor) -> List[torch.Tensor]: """ This function is used to re-order the `mems` cache if [`~PreTrainedModel.beam_search`] or [`~PreTrainedModel.beam_sample`] is called. This is required to match `mems` with the correct beam_idx at every generation step. """ return [layer_past.index_select(1, beam_idx.to(layer_past.device)) for layer_past in mems] @add_start_docstrings( """ XLNet Model with a sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. for GLUE tasks. """, XLNET_START_DOCSTRING, ) class XLNetForSequenceClassification(XLNetPreTrainedModel): def __init__(self, config): super().__init__(config) self.num_labels = config.num_labels self.config = config self.transformer = XLNetModel(config) self.sequence_summary = SequenceSummary(config) self.logits_proj = nn.Linear(config.d_model, config.num_labels) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=XLNetForSequenceClassificationOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.Tensor] = None, attention_mask: Optional[torch.Tensor] = None, mems: Optional[torch.Tensor] = None, perm_mask: Optional[torch.Tensor] = None, target_mapping: Optional[torch.Tensor] = None, token_type_ids: Optional[torch.Tensor] = None, input_mask: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, inputs_embeds: Optional[torch.Tensor] = None, labels: Optional[torch.Tensor] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, **kwargs, # delete when `use_cache` is removed in XLNetModel ) -> Union[Tuple, XLNetForSequenceClassificationOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing 
the sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict transformer_outputs = self.transformer( input_ids, attention_mask=attention_mask, mems=mems, perm_mask=perm_mask, target_mapping=target_mapping, token_type_ids=token_type_ids, input_mask=input_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, use_mems=use_mems, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, **kwargs, ) output = transformer_outputs[0] output = self.sequence_summary(output) logits = self.logits_proj(output) loss = None if labels is not None: if self.config.problem_type is None: if self.num_labels == 1: self.config.problem_type = "regression" elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int): self.config.problem_type = "single_label_classification" else: self.config.problem_type = "multi_label_classification" if self.config.problem_type == "regression": loss_fct = MSELoss() if self.num_labels == 1: loss = loss_fct(logits.squeeze(), labels.squeeze()) else: loss = loss_fct(logits, labels) elif self.config.problem_type == "single_label_classification": loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) elif self.config.problem_type == "multi_label_classification": loss_fct = BCEWithLogitsLoss() loss = loss_fct(logits, labels) if not return_dict: output = (logits,) + transformer_outputs[1:] return ((loss,) + output) if loss is not None else output return XLNetForSequenceClassificationOutput( loss=loss, logits=logits, mems=transformer_outputs.mems, hidden_states=transformer_outputs.hidden_states, attentions=transformer_outputs.attentions, ) @add_start_docstrings( """ 
XLNet Model with a token classification head on top (a linear layer on top of the hidden-states output) e.g. for Named-Entity-Recognition (NER) tasks. """, XLNET_START_DOCSTRING, ) class XLNetForTokenClassification(XLNetPreTrainedModel): def __init__(self, config): super().__init__(config) self.num_labels = config.num_labels self.transformer = XLNetModel(config) self.classifier = nn.Linear(config.hidden_size, config.num_labels) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=XLNetForTokenClassificationOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.Tensor] = None, attention_mask: Optional[torch.Tensor] = None, mems: Optional[torch.Tensor] = None, perm_mask: Optional[torch.Tensor] = None, target_mapping: Optional[torch.Tensor] = None, token_type_ids: Optional[torch.Tensor] = None, input_mask: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, inputs_embeds: Optional[torch.Tensor] = None, labels: Optional[torch.Tensor] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, **kwargs, # delete when `use_cache` is removed in XLNetModel ) -> Union[Tuple, XLNetForTokenClassificationOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the multiple choice classification loss. Indices should be in `[0, ..., num_choices]` where *num_choices* is the size of the second dimension of the input tensors. 
(see *input_ids* above) """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.transformer( input_ids, attention_mask=attention_mask, mems=mems, perm_mask=perm_mask, target_mapping=target_mapping, token_type_ids=token_type_ids, input_mask=input_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, use_mems=use_mems, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = outputs[0] logits = self.classifier(sequence_output) loss = None if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output = (logits,) + outputs[1:] return ((loss,) + output) if loss is not None else output return XLNetForTokenClassificationOutput( loss=loss, logits=logits, mems=outputs.mems, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) @add_start_docstrings( """ XLNet Model with a multiple choice classification head on top (a linear layer on top of the pooled output and a softmax) e.g. for RACE/SWAG tasks. 
""", XLNET_START_DOCSTRING, ) class XLNetForMultipleChoice(XLNetPreTrainedModel): def __init__(self, config): super().__init__(config) self.transformer = XLNetModel(config) self.sequence_summary = SequenceSummary(config) self.logits_proj = nn.Linear(config.d_model, 1) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, num_choices, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=XLNetForMultipleChoiceOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.Tensor] = None, token_type_ids: Optional[torch.Tensor] = None, input_mask: Optional[torch.Tensor] = None, attention_mask: Optional[torch.Tensor] = None, mems: Optional[torch.Tensor] = None, perm_mask: Optional[torch.Tensor] = None, target_mapping: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, inputs_embeds: Optional[torch.Tensor] = None, labels: Optional[torch.Tensor] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, **kwargs, # delete when `use_cache` is removed in XLNetModel ) -> Union[Tuple, XLNetForMultipleChoiceOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the multiple choice classification loss. Indices should be in `[0, ..., num_choices-1]` where `num_choices` is the size of the second dimension of the input tensors. 
(See `input_ids` above) """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict num_choices = input_ids.shape[1] if input_ids is not None else inputs_embeds.shape[1] flat_input_ids = input_ids.view(-1, input_ids.size(-1)) if input_ids is not None else None flat_token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) if token_type_ids is not None else None flat_attention_mask = attention_mask.view(-1, attention_mask.size(-1)) if attention_mask is not None else None flat_input_mask = input_mask.view(-1, input_mask.size(-1)) if input_mask is not None else None flat_inputs_embeds = ( inputs_embeds.view(-1, inputs_embeds.size(-2), inputs_embeds.size(-1)) if inputs_embeds is not None else None ) transformer_outputs = self.transformer( flat_input_ids, token_type_ids=flat_token_type_ids, input_mask=flat_input_mask, attention_mask=flat_attention_mask, mems=mems, perm_mask=perm_mask, target_mapping=target_mapping, head_mask=head_mask, inputs_embeds=flat_inputs_embeds, use_mems=use_mems, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, **kwargs, ) output = transformer_outputs[0] output = self.sequence_summary(output) logits = self.logits_proj(output) reshaped_logits = logits.view(-1, num_choices) loss = None if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(reshaped_logits, labels.view(-1)) if not return_dict: output = (reshaped_logits,) + transformer_outputs[1:] return ((loss,) + output) if loss is not None else output return XLNetForMultipleChoiceOutput( loss=loss, logits=reshaped_logits, mems=transformer_outputs.mems, hidden_states=transformer_outputs.hidden_states, attentions=transformer_outputs.attentions, ) @add_start_docstrings( """ XLNet Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear layers on top of the hidden-states output to compute `span start logits` and `span end logits`). 
""", XLNET_START_DOCSTRING, ) class XLNetForQuestionAnsweringSimple(XLNetPreTrainedModel): def __init__(self, config): super().__init__(config) self.num_labels = config.num_labels self.transformer = XLNetModel(config) self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=XLNetForQuestionAnsweringSimpleOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.Tensor] = None, attention_mask: Optional[torch.Tensor] = None, mems: Optional[torch.Tensor] = None, perm_mask: Optional[torch.Tensor] = None, target_mapping: Optional[torch.Tensor] = None, token_type_ids: Optional[torch.Tensor] = None, input_mask: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, inputs_embeds: Optional[torch.Tensor] = None, start_positions: Optional[torch.Tensor] = None, end_positions: Optional[torch.Tensor] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, **kwargs, # delete when `use_cache` is removed in XLNetModel ) -> Union[Tuple, XLNetForQuestionAnsweringSimpleOutput]: r""" start_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the start of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence are not taken into account for computing the loss. end_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the end of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (`sequence_length`). 
Position outside of the sequence are not taken into account for computing the loss. """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.transformer( input_ids, attention_mask=attention_mask, mems=mems, perm_mask=perm_mask, target_mapping=target_mapping, token_type_ids=token_type_ids, input_mask=input_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, use_mems=use_mems, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, **kwargs, ) sequence_output = outputs[0] logits = self.qa_outputs(sequence_output) start_logits, end_logits = logits.split(1, dim=-1) start_logits = start_logits.squeeze(-1).contiguous() end_logits = end_logits.squeeze(-1).contiguous() total_loss = None if start_positions is not None and end_positions is not None: # If we are on multi-GPU, split add a dimension if len(start_positions.size()) > 1: start_positions = start_positions.squeeze(-1) if len(end_positions.size()) > 1: end_positions = end_positions.squeeze(-1) # sometimes the start/end positions are outside our model inputs, we ignore these terms ignored_index = start_logits.size(1) start_positions = start_positions.clamp(0, ignored_index) end_positions = end_positions.clamp(0, ignored_index) loss_fct = CrossEntropyLoss(ignore_index=ignored_index) start_loss = loss_fct(start_logits, start_positions) end_loss = loss_fct(end_logits, end_positions) total_loss = (start_loss + end_loss) / 2 if not return_dict: output = (start_logits, end_logits) + outputs[1:] return ((total_loss,) + output) if total_loss is not None else output return XLNetForQuestionAnsweringSimpleOutput( loss=total_loss, start_logits=start_logits, end_logits=end_logits, mems=outputs.mems, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) @add_start_docstrings( """ XLNet Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear layers on top of the 
hidden-states output to compute `span start logits` and `span end logits`).
    """,
    XLNET_START_DOCSTRING,
)
class XLNetForQuestionAnswering(XLNetPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        # Beam widths for the two-stage start/end prediction at inference time.
        self.start_n_top = config.start_n_top
        self.end_n_top = config.end_n_top

        self.transformer = XLNetModel(config)
        self.start_logits = PoolerStartLogits(config)
        self.end_logits = PoolerEndLogits(config)
        self.answer_class = PoolerAnswerClass(config)

        # Initialize weights and apply final processing
        self.post_init()

    @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @replace_return_docstrings(output_type=XLNetForQuestionAnsweringOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        mems: Optional[torch.Tensor] = None,
        perm_mask: Optional[torch.Tensor] = None,
        target_mapping: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        input_mask: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        start_positions: Optional[torch.Tensor] = None,
        end_positions: Optional[torch.Tensor] = None,
        is_impossible: Optional[torch.Tensor] = None,
        cls_index: Optional[torch.Tensor] = None,
        p_mask: Optional[torch.Tensor] = None,
        use_mems: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        **kwargs,  # delete when `use_cache` is removed in XLNetModel
    ) -> Union[Tuple, XLNetForQuestionAnsweringOutput]:
        r"""
        start_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for position (index) of the start of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence
            are not taken into account for computing the loss.
        end_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for position (index) of the end of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence
            are not taken into account for computing the loss.
        is_impossible (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels whether a question has an answer or no answer (SQuAD 2.0)
        cls_index (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for position (index) of the classification token to use as input for computing plausibility of the
            answer.
        p_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Optional mask of tokens which can't be in answers (e.g. [CLS], [PAD], ...). 1.0 means token should be
            masked. 0.0 mean token is not masked.

        Returns:

        Example:

        ```python
        >>> from transformers import AutoTokenizer, XLNetForQuestionAnswering
        >>> import torch

        >>> tokenizer = AutoTokenizer.from_pretrained("xlnet-base-cased")
        >>> model = XLNetForQuestionAnswering.from_pretrained("xlnet-base-cased")

        >>> input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute", add_special_tokens=True)).unsqueeze(
        ...     0
        ... )  # Batch size 1
        >>> start_positions = torch.tensor([1])
        >>> end_positions = torch.tensor([3])
        >>> outputs = model(input_ids, start_positions=start_positions, end_positions=end_positions)

        >>> loss = outputs.loss
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        transformer_outputs = self.transformer(
            input_ids,
            attention_mask=attention_mask,
            mems=mems,
            perm_mask=perm_mask,
            target_mapping=target_mapping,
            token_type_ids=token_type_ids,
            input_mask=input_mask,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            use_mems=use_mems,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            **kwargs,
        )
        hidden_states = transformer_outputs[0]
        start_logits = self.start_logits(hidden_states, p_mask=p_mask)

        outputs = transformer_outputs[1:]  # Keep mems, hidden states, attentions if there are in it

        if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, let's remove the dimension added by batch splitting
            # NOTE(review): squeeze_ mutates the caller's label tensors in place — TODO confirm intended.
            for x in (start_positions, end_positions, cls_index, is_impossible):
                if x is not None and x.dim() > 1:
                    x.squeeze_(-1)

            # during training, compute the end logits based on the ground truth of the start position
            end_logits = self.end_logits(hidden_states, start_positions=start_positions, p_mask=p_mask)

            loss_fct = CrossEntropyLoss()
            start_loss = loss_fct(start_logits, start_positions)
            end_loss = loss_fct(end_logits, end_positions)
            total_loss = (start_loss + end_loss) / 2

            if cls_index is not None and is_impossible is not None:
                # Predict answerability from the representation of CLS and START
                cls_logits = self.answer_class(hidden_states, start_positions=start_positions, cls_index=cls_index)
                loss_fct_cls = nn.BCEWithLogitsLoss()
                cls_loss = loss_fct_cls(cls_logits, is_impossible)

                # note(zhiliny): by default multiply the loss by 0.5 so that the scale is comparable to start_loss and end_loss
                total_loss += cls_loss * 0.5

            if not return_dict:
                # Training branch returns only the loss plus the transformer extras (no logits).
                return (total_loss,) + transformer_outputs[1:]
            else:
                return XLNetForQuestionAnsweringOutput(
                    loss=total_loss,
                    mems=transformer_outputs.mems,
                    hidden_states=transformer_outputs.hidden_states,
                    attentions=transformer_outputs.attentions,
                )

        else:
            # during inference, compute the end logits based on beam search
            bsz, slen, hsz = hidden_states.size()
            start_log_probs = nn.functional.softmax(start_logits, dim=-1)  # shape (bsz, slen)

            start_top_log_probs, start_top_index = torch.topk(
                start_log_probs, self.start_n_top, dim=-1
            )  # shape (bsz, start_n_top)
            start_top_index_exp = start_top_index.unsqueeze(-1).expand(-1, -1, hsz)  # shape (bsz, start_n_top, hsz)
            start_states = torch.gather(hidden_states, -2, start_top_index_exp)  # shape (bsz, start_n_top, hsz)
            start_states = start_states.unsqueeze(1).expand(-1, slen, -1, -1)  # shape (bsz, slen, start_n_top, hsz)

            hidden_states_expanded = hidden_states.unsqueeze(2).expand_as(
                start_states
            )  # shape (bsz, slen, start_n_top, hsz)
            p_mask = p_mask.unsqueeze(-1) if p_mask is not None else None
            end_logits = self.end_logits(hidden_states_expanded, start_states=start_states, p_mask=p_mask)
            end_log_probs = nn.functional.softmax(end_logits, dim=1)  # shape (bsz, slen, start_n_top)

            end_top_log_probs, end_top_index = torch.topk(
                end_log_probs, self.end_n_top, dim=1
            )  # shape (bsz, end_n_top, start_n_top)
            end_top_log_probs = end_top_log_probs.view(-1, self.start_n_top * self.end_n_top)
            end_top_index = end_top_index.view(-1, self.start_n_top * self.end_n_top)

            start_states = torch.einsum(
                "blh,bl->bh", hidden_states, start_log_probs
            )  # get the representation of START as weighted sum of hidden states
            cls_logits = self.answer_class(
                hidden_states, start_states=start_states, cls_index=cls_index
            )  # Shape (batch size,): one single `cls_logits` for each sample

            if not return_dict:
                outputs = (start_top_log_probs, start_top_index, end_top_log_probs, end_top_index, cls_logits)
                return outputs + transformer_outputs[1:]
            else:
                return XLNetForQuestionAnsweringOutput(
                    start_top_log_probs=start_top_log_probs,
                    start_top_index=start_top_index,
                    end_top_log_probs=end_top_log_probs,
                    end_top_index=end_top_index,
                    cls_logits=cls_logits,
                    mems=transformer_outputs.mems,
                    hidden_states=transformer_outputs.hidden_states,
                    attentions=transformer_outputs.attentions,
                )
27182812/ChatGLM-LLaMA-chinese-insturct
77,792
src/transformers/models/xlnet/modeling_tf_xlnet.py
# coding=utf-8 # Copyright 2018 Google AI, Google Brain and Carnegie Mellon University Authors and the HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ TF 2.0 XLNet model. """ import warnings from dataclasses import dataclass from typing import List, Optional, Tuple, Union import numpy as np import tensorflow as tf from ...activations_tf import get_tf_activation from ...modeling_tf_utils import ( TFCausalLanguageModelingLoss, TFModelInputType, TFMultipleChoiceLoss, TFPreTrainedModel, TFQuestionAnsweringLoss, TFSequenceClassificationLoss, TFSequenceSummary, TFSharedEmbeddings, TFTokenClassificationLoss, get_initializer, keras_serializable, unpack_inputs, ) from ...tf_utils import shape_list, stable_softmax from ...utils import ( MULTIPLE_CHOICE_DUMMY_INPUTS, ModelOutput, add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_xlnet import XLNetConfig logger = logging.get_logger(__name__) _CHECKPOINT_FOR_DOC = "xlnet-base-cased" _CONFIG_FOR_DOC = "XLNetConfig" TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST = [ "xlnet-base-cased", "xlnet-large-cased", # See all XLNet models at https://huggingface.co/models?filter=xlnet ] class TFXLNetRelativeAttention(tf.keras.layers.Layer): def __init__(self, config, **kwargs): super().__init__(**kwargs) if config.d_model % config.n_head != 0: raise ValueError( f"The hidden size 
({config.d_model}) is not a multiple of the number of attention " f"heads ({config.n_head}" ) self.n_head = config.n_head self.d_head = config.d_head self.d_model = config.d_model self.scale = 1 / (config.d_head**0.5) self.initializer_range = config.initializer_range self.output_attentions = config.output_attentions self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="layer_norm") self.dropout = tf.keras.layers.Dropout(config.dropout) def build(self, input_shape): initializer = get_initializer(self.initializer_range) self.q = self.add_weight( shape=(self.d_model, self.n_head, self.d_head), initializer=initializer, trainable=True, name="q" ) self.k = self.add_weight( shape=(self.d_model, self.n_head, self.d_head), initializer=initializer, trainable=True, name="k" ) self.v = self.add_weight( shape=(self.d_model, self.n_head, self.d_head), initializer=initializer, trainable=True, name="v" ) self.o = self.add_weight( shape=(self.d_model, self.n_head, self.d_head), initializer=initializer, trainable=True, name="o" ) self.r = self.add_weight( shape=(self.d_model, self.n_head, self.d_head), initializer=initializer, trainable=True, name="r" ) self.r_r_bias = self.add_weight( shape=(self.n_head, self.d_head), initializer="zeros", trainable=True, name="r_r_bias" ) self.r_s_bias = self.add_weight( shape=(self.n_head, self.d_head), initializer="zeros", trainable=True, name="r_s_bias" ) self.r_w_bias = self.add_weight( shape=(self.n_head, self.d_head), initializer="zeros", trainable=True, name="r_w_bias" ) self.seg_embed = self.add_weight( shape=(2, self.n_head, self.d_head), initializer=initializer, trainable=True, name="seg_embed" ) super().build(input_shape) def prune_heads(self, heads): raise NotImplementedError def rel_shift(self, x, klen=-1): """perform relative shift to form the relative attention score.""" x_size = shape_list(x) x = tf.reshape(x, (x_size[1], x_size[0], x_size[2], x_size[3])) x = x[1:, ...] 
x = tf.reshape(x, (x_size[0], x_size[1] - 1, x_size[2], x_size[3])) x = x[:, 0:klen, :, :] # x = torch.index_select(x, 1, torch.arange(klen, device=x.device, dtype=torch.long)) return x def rel_attn_core( self, q_head, k_head_h, v_head_h, k_head_r, seg_mat, attn_mask, head_mask, output_attentions, training=False ): """Core relative positional attention operations.""" # content based attention score ac = tf.einsum("ibnd,jbnd->ijbn", q_head + self.r_w_bias, k_head_h) # position based attention score bd = tf.einsum("ibnd,jbnd->ijbn", q_head + self.r_r_bias, k_head_r) bd = self.rel_shift(bd, klen=shape_list(ac)[1]) # segment based attention score if seg_mat is None: ef = 0 else: ef = tf.einsum("ibnd,snd->ibns", q_head + self.r_s_bias, self.seg_embed) ef = tf.einsum("ijbs,ibns->ijbn", seg_mat, ef) # merge attention scores and perform masking attn_score = (ac + bd + ef) * self.scale if attn_mask is not None: # attn_score = attn_score * (1 - attn_mask) - 1e30 * attn_mask if attn_mask.dtype == tf.float16 or attn_mask.dtype == tf.bfloat16: attn_score = attn_score - 65500 * attn_mask else: attn_score = attn_score - 1e30 * attn_mask # attention probability attn_prob = stable_softmax(attn_score, axis=1) attn_prob = self.dropout(attn_prob, training=training) # Mask heads if we want to if head_mask is not None: attn_prob = attn_prob * head_mask # attention output attn_vec = tf.einsum("ijbn,jbnd->ibnd", attn_prob, v_head_h) if output_attentions: return attn_vec, attn_prob return attn_vec def post_attention(self, h, attn_vec, residual=True, training=False): """Post-attention processing.""" # post-attention projection (back to `d_model`) attn_out = tf.einsum("ibnd,hnd->ibh", attn_vec, self.o) attn_out = self.dropout(attn_out, training=training) if residual: attn_out = attn_out + h output = self.layer_norm(attn_out) return output def call( self, h, g, attn_mask_h, attn_mask_g, r, seg_mat, mems: Optional[Union[np.ndarray, tf.Tensor]] = None, target_mapping: Optional[Union[np.ndarray, 
tf.Tensor]] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, output_attentions: Optional[bool] = False, training: bool = False, ): if g is not None: # Two-stream attention with relative positional encoding. # content based attention score if mems is not None and len(shape_list(mems)) > 1: cat = tf.concat([mems, h], axis=0) else: cat = h # content-based key head k_head_h = tf.einsum("ibh,hnd->ibnd", cat, self.k) # content-based value head v_head_h = tf.einsum("ibh,hnd->ibnd", cat, self.v) # position-based key head k_head_r = tf.einsum("ibh,hnd->ibnd", r, self.r) # h-stream # content-stream query head q_head_h = tf.einsum("ibh,hnd->ibnd", h, self.q) # core attention ops attn_vec_h = self.rel_attn_core( q_head_h, k_head_h, v_head_h, k_head_r, seg_mat, attn_mask_h, head_mask, output_attentions, training=training, ) if output_attentions: attn_vec_h, attn_prob_h = attn_vec_h # post processing output_h = self.post_attention(h, attn_vec_h, training=training) # g-stream # query-stream query head q_head_g = tf.einsum("ibh,hnd->ibnd", g, self.q) # core attention ops if target_mapping is not None: q_head_g = tf.einsum("mbnd,mlb->lbnd", q_head_g, target_mapping) attn_vec_g = self.rel_attn_core( q_head_g, k_head_h, v_head_h, k_head_r, seg_mat, attn_mask_g, head_mask, output_attentions, training=training, ) if output_attentions: attn_vec_g, attn_prob_g = attn_vec_g attn_vec_g = tf.einsum("lbnd,mlb->mbnd", attn_vec_g, target_mapping) else: attn_vec_g = self.rel_attn_core( q_head_g, k_head_h, v_head_h, k_head_r, seg_mat, attn_mask_g, head_mask, output_attentions, training=training, ) if output_attentions: attn_vec_g, attn_prob_g = attn_vec_g # post processing output_g = self.post_attention(g, attn_vec_g, training=training) if output_attentions: attn_prob = attn_prob_h, attn_prob_g else: # Multi-head attention with relative positional encoding if mems is not None and len(shape_list(mems)) > 1: cat = tf.concat([mems, h], axis=0) else: cat = h # content heads q_head_h 
= tf.einsum("ibh,hnd->ibnd", h, self.q) k_head_h = tf.einsum("ibh,hnd->ibnd", cat, self.k) v_head_h = tf.einsum("ibh,hnd->ibnd", cat, self.v) # positional heads k_head_r = tf.einsum("ibh,hnd->ibnd", r, self.r) # core attention ops attn_vec = self.rel_attn_core( q_head_h, k_head_h, v_head_h, k_head_r, seg_mat, attn_mask_h, head_mask, output_attentions, training=training, ) if output_attentions: attn_vec, attn_prob = attn_vec # post processing output_h = self.post_attention(h, attn_vec, training=training) output_g = None outputs = (output_h, output_g) if output_attentions: outputs = outputs + (attn_prob,) return outputs class TFXLNetFeedForward(tf.keras.layers.Layer): def __init__(self, config, **kwargs): super().__init__(**kwargs) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="layer_norm") self.layer_1 = tf.keras.layers.Dense( config.d_inner, kernel_initializer=get_initializer(config.initializer_range), name="layer_1" ) self.layer_2 = tf.keras.layers.Dense( config.d_model, kernel_initializer=get_initializer(config.initializer_range), name="layer_2" ) self.dropout = tf.keras.layers.Dropout(config.dropout) if isinstance(config.ff_activation, str): self.activation_function = get_tf_activation(config.ff_activation) else: self.activation_function = config.ff_activation def call(self, inp, training=False): output = inp output = self.layer_1(output) output = self.activation_function(output) output = self.dropout(output, training=training) output = self.layer_2(output) output = self.dropout(output, training=training) output = self.layer_norm(output + inp) return output class TFXLNetLayer(tf.keras.layers.Layer): def __init__(self, config, **kwargs): super().__init__(**kwargs) self.rel_attn = TFXLNetRelativeAttention(config, name="rel_attn") self.ff = TFXLNetFeedForward(config, name="ff") self.dropout = tf.keras.layers.Dropout(config.dropout) def call( self, output_h, output_g, non_tgt_mask, attn_mask, pos_emb, seg_mat, mems: 
Optional[Union[np.ndarray, tf.Tensor]] = None, target_mapping: Optional[Union[np.ndarray, tf.Tensor]] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, output_attentions: Optional[bool] = False, training: bool = False, ): outputs = self.rel_attn( output_h, output_g, non_tgt_mask, attn_mask, pos_emb, seg_mat, mems, target_mapping, head_mask, output_attentions, training=training, ) output_h, output_g = outputs[:2] if output_g is not None: output_g = self.ff(output_g, training=training) output_h = self.ff(output_h, training=training) outputs = (output_h, output_g) + outputs[2:] # Add again attentions if there are there return outputs class TFXLNetLMHead(tf.keras.layers.Layer): def __init__(self, config, input_embeddings, **kwargs): super().__init__(**kwargs) self.config = config # The output weights are the same as the input embeddings, but there is # an output-only bias for each token. self.input_embeddings = input_embeddings def build(self, input_shape): self.bias = self.add_weight(shape=(self.config.vocab_size,), initializer="zeros", trainable=True, name="bias") super().build(input_shape) def get_output_embeddings(self): return self.input_embeddings def set_output_embeddings(self, value): self.input_embeddings.weight = value self.input_embeddings.vocab_size = shape_list(value)[0] def get_bias(self): return {"bias": self.bias} def set_bias(self, value): self.bias = value["bias"] self.config.vocab_size = shape_list(value["bias"])[0] def call(self, hidden_states): hidden_states = self.input_embeddings(hidden_states, mode="linear") hidden_states = hidden_states + self.bias return hidden_states @keras_serializable class TFXLNetMainLayer(tf.keras.layers.Layer): config_class = XLNetConfig def __init__(self, config, **kwargs): super().__init__(**kwargs) self.config = config self.output_hidden_states = config.output_hidden_states self.output_attentions = config.output_attentions self.return_dict = config.return_dict self.mem_len = config.mem_len 
self.reuse_len = config.reuse_len self.d_model = config.d_model self.same_length = config.same_length self.attn_type = config.attn_type self.bi_data = config.bi_data self.clamp_len = config.clamp_len self.n_layer = config.n_layer self.use_bfloat16 = config.use_bfloat16 self.initializer_range = config.initializer_range self.word_embedding = TFSharedEmbeddings( config.vocab_size, config.d_model, initializer_range=config.initializer_range, name="word_embedding" ) self.layer = [TFXLNetLayer(config, name=f"layer_._{i}") for i in range(config.n_layer)] self.dropout = tf.keras.layers.Dropout(config.dropout) self.use_mems_eval = config.use_mems_eval self.use_mems_train = config.use_mems_train def get_input_embeddings(self): return self.word_embedding def set_input_embeddings(self, value): self.word_embedding.weight = value self.word_embedding.vocab_size = shape_list(value)[0] def build(self, input_shape): initializer = get_initializer(self.initializer_range) self.mask_emb = self.add_weight( shape=(1, 1, self.d_model), initializer=initializer, trainable=True, name="mask_emb" ) def _prune_heads(self, heads_to_prune): raise NotImplementedError def create_mask(self, qlen, mlen): """ Creates causal attention mask. Float mask where 1.0 indicates masked, 0.0 indicates not-masked. 
Args: qlen: TODO Lysandre didn't fill mlen: TODO Lysandre didn't fill ``` same_length=False: same_length=True: <mlen > < qlen > <mlen > < qlen > ^ [0 0 0 0 0 1 1 1 1] [0 0 0 0 0 1 1 1 1] [0 0 0 0 0 0 1 1 1] [1 0 0 0 0 0 1 1 1] qlen [0 0 0 0 0 0 0 1 1] [1 1 0 0 0 0 0 1 1] [0 0 0 0 0 0 0 0 1] [1 1 1 0 0 0 0 0 1] v [0 0 0 0 0 0 0 0 0] [1 1 1 1 0 0 0 0 0] ``` """ attn_mask = tf.ones([qlen, qlen]) mask_u = tf.linalg.band_part(attn_mask, 0, -1) mask_dia = tf.linalg.band_part(attn_mask, 0, 0) attn_mask_pad = tf.zeros([qlen, mlen]) ret = tf.concat([attn_mask_pad, mask_u - mask_dia], 1) if self.same_length: mask_l = tf.linalg.band_part(attn_mask, -1, 0) ret = tf.concat([ret[:, :qlen] + mask_l - mask_dia, ret[:, qlen:]], 1) return ret def cache_mem(self, curr_out, prev_mem): # cache hidden states into memory. if self.reuse_len is not None and self.reuse_len > 0: curr_out = curr_out[: self.reuse_len] if self.mem_len is None or self.mem_len == 0: # If `use_mems` is active but no `mem_len` is defined, the model behaves like GPT-2 at inference time # and returns all of the past and current hidden states. cutoff = 0 else: # If `use_mems` is active and `mem_len` is defined, the model returns the last `mem_len` hidden # states. This is the preferred setting for training and long-form generation. 
cutoff = -self.mem_len if prev_mem is None: # if `use_mems` is active and `mem_len` is defined, the model new_mem = curr_out[cutoff:] else: new_mem = tf.concat([prev_mem, curr_out], 0)[cutoff:] return tf.stop_gradient(new_mem) @staticmethod def positional_embedding(pos_seq, inv_freq, bsz=None): sinusoid_inp = tf.einsum("i,d->id", pos_seq, inv_freq) pos_emb = tf.concat([tf.sin(sinusoid_inp), tf.cos(sinusoid_inp)], axis=-1) pos_emb = pos_emb[:, None, :] if bsz is not None: pos_emb = tf.tile(pos_emb, [1, bsz, 1]) return pos_emb def relative_positional_encoding(self, qlen, klen, bsz=None): """create relative positional encoding.""" freq_seq = tf.range(0, self.d_model, 2.0) inv_freq = 1 / (10000 ** (freq_seq / self.d_model)) if self.attn_type == "bi": # beg, end = klen - 1, -qlen beg, end = klen, -qlen elif self.attn_type == "uni": # beg, end = klen - 1, -1 beg, end = klen, -1 else: raise ValueError(f"Unknown `attn_type` {self.attn_type}.") if self.bi_data: fwd_pos_seq = tf.range(beg, end, -1.0) bwd_pos_seq = tf.range(-beg, -end, 1.0) if self.clamp_len > 0: fwd_pos_seq = tf.clip_by_value(fwd_pos_seq, -self.clamp_len, self.clamp_len) bwd_pos_seq = tf.clip_by_value(bwd_pos_seq, -self.clamp_len, self.clamp_len) if bsz is not None: if bsz % 2 != 0: raise ValueError(f"With bi_data, the batch size {bsz} should be divisible by 2") fwd_pos_emb = self.positional_embedding(fwd_pos_seq, inv_freq, bsz // 2) bwd_pos_emb = self.positional_embedding(bwd_pos_seq, inv_freq, bsz // 2) else: fwd_pos_emb = self.positional_embedding(fwd_pos_seq, inv_freq) bwd_pos_emb = self.positional_embedding(bwd_pos_seq, inv_freq) pos_emb = tf.concat([fwd_pos_emb, bwd_pos_emb], axis=1) else: fwd_pos_seq = tf.range(beg, end, -1.0) if self.clamp_len > 0: fwd_pos_seq = tf.clip_by_value(fwd_pos_seq, -self.clamp_len, self.clamp_len) pos_emb = self.positional_embedding(fwd_pos_seq, inv_freq, bsz) return pos_emb @unpack_inputs def call( self, input_ids: Optional[TFModelInputType] = None, attention_mask: 
Optional[Union[np.ndarray, tf.Tensor]] = None, mems: Optional[Union[np.ndarray, tf.Tensor]] = None, perm_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, target_mapping: Optional[Union[np.ndarray, tf.Tensor]] = None, token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, input_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, training: bool = False, ): if training and use_mems is None: use_mems = self.use_mems_train else: use_mems = self.use_mems_eval # the original code for XLNet uses shapes [len, bsz] with the batch dimension at the end # but we want a unified interface in the library with the batch size on the first dimension # so we move here the first dimension (batch) to the end if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") elif input_ids is not None: input_ids = tf.transpose(input_ids, perm=(1, 0)) qlen, bsz = shape_list(input_ids)[:2] elif inputs_embeds is not None: inputs_embeds = tf.transpose(inputs_embeds, perm=(1, 0, 2)) qlen, bsz = shape_list(inputs_embeds)[:2] else: raise ValueError("You have to specify either input_ids or inputs_embeds") token_type_ids = tf.transpose(token_type_ids, perm=(1, 0)) if token_type_ids is not None else None input_mask = tf.transpose(input_mask, perm=(1, 0)) if input_mask is not None else None attention_mask = tf.transpose(attention_mask, perm=(1, 0)) if attention_mask is not None else None perm_mask = tf.transpose(perm_mask, perm=(1, 2, 0)) if perm_mask is not None else None target_mapping = tf.transpose(target_mapping, perm=(1, 2, 0)) if target_mapping is not None else None mlen = shape_list(mems[0])[0] if mems is not None and mems[0] is 
not None else 0 klen = mlen + qlen # Attention mask # causal attention mask if self.attn_type == "uni": attn_mask = self.create_mask(qlen, mlen) attn_mask = attn_mask[:, :, None, None] elif self.attn_type == "bi": attn_mask = None else: raise ValueError(f"Unsupported attention type: {self.attn_type}") # data mask: input mask & perm mask assert input_mask is None or attention_mask is None, ( "You can only use one of input_mask (uses 1 for padding) " "or attention_mask (uses 0 for padding, added for compatibility with BERT). Please choose one." ) if input_mask is None and attention_mask is not None: one_cst = tf.constant(1.0) input_mask = 1.0 - tf.cast(attention_mask, dtype=one_cst.dtype) if input_mask is not None and perm_mask is not None: data_mask = input_mask[None] + perm_mask elif input_mask is not None and perm_mask is None: data_mask = input_mask[None] elif input_mask is None and perm_mask is not None: data_mask = perm_mask else: data_mask = None if data_mask is not None: # all mems can be attended to if mlen > 0: mems_mask = tf.zeros([shape_list(data_mask)[0], mlen, bsz]) data_mask = tf.concat([mems_mask, data_mask], axis=1) if attn_mask is None: attn_mask = data_mask[:, :, :, None] else: attn_mask += data_mask[:, :, :, None] if attn_mask is not None: attn_mask = tf.cast(attn_mask > 0, dtype=attn_mask.dtype) if attn_mask is not None: non_tgt_mask = -tf.eye(qlen) if mlen > 0: non_tgt_mask = tf.concat([tf.zeros([qlen, mlen]), non_tgt_mask], axis=-1) non_tgt_mask = tf.cast((attn_mask + non_tgt_mask[:, :, None, None]) > 0, dtype=non_tgt_mask.dtype) else: non_tgt_mask = None # Word embeddings and prepare h & g hidden states if inputs_embeds is not None: word_emb_k = inputs_embeds else: # Note: tf.gather, on which the embedding layer is based, won't check positive out of bound # indices on GPU, returning zeros instead. This is a dangerous silent behavior. 
tf.debugging.assert_less( input_ids, tf.cast(self.word_embedding.vocab_size, dtype=input_ids.dtype), message=( "input_ids must be smaller than the embedding layer's input dimension (got" f" {tf.math.reduce_max(input_ids)} >= {self.word_embedding.vocab_size})" ), ) word_emb_k = self.word_embedding(input_ids) output_h = self.dropout(word_emb_k, training=training) if target_mapping is not None: word_emb_q = tf.tile(self.mask_emb, [shape_list(target_mapping)[0], bsz, 1]) # else: # We removed the inp_q input which was same as target mapping # inp_q_ext = inp_q[:, :, None] # word_emb_q = inp_q_ext * self.mask_emb + (1 - inp_q_ext) * word_emb_k output_g = self.dropout(word_emb_q, training=training) else: output_g = None # Segment embedding if token_type_ids is not None: # Convert `token_type_ids` to one-hot `seg_mat` if mlen > 0: mem_pad = tf.zeros([mlen, bsz], dtype=token_type_ids.dtype) cat_ids = tf.concat([mem_pad, token_type_ids], 0) else: cat_ids = token_type_ids # `1` indicates not in the same segment [qlen x klen x bsz] seg_mat = tf.cast( tf.logical_not(tf.equal(token_type_ids[:, None], cat_ids[None, :])), dtype=token_type_ids.dtype, ) seg_mat = tf.one_hot(seg_mat, 2) else: seg_mat = None # Positional encoding pos_emb = self.relative_positional_encoding(qlen, klen, bsz=bsz) pos_emb = self.dropout(pos_emb, training=training) # Prepare head mask if needed # 1.0 in head_mask indicate we keep the head # attention_probs has shape bsz x n_heads x N x N # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] (a head_mask for each layer) # and head_mask is converted to shape [num_hidden_layers x qlen x klen x bsz x n_head] if head_mask is not None: raise NotImplementedError else: head_mask = [None] * self.n_layer new_mems = () if mems is None: mems = [None] * len(self.layer) attentions = [] if output_attentions else None hidden_states = [] if output_hidden_states else None for i, layer_module in enumerate(self.layer): # cache new mems if use_mems: 
new_mems = new_mems + (self.cache_mem(output_h, mems[i]),) if output_hidden_states: hidden_states.append((output_h, output_g) if output_g is not None else output_h) outputs = layer_module( output_h, output_g, non_tgt_mask, attn_mask, pos_emb, seg_mat, mems[i], target_mapping, head_mask[i], output_attentions, training=training, ) output_h, output_g = outputs[:2] if output_attentions: attentions.append(outputs[2]) # Add last hidden state if output_hidden_states: hidden_states.append((output_h, output_g) if output_g is not None else output_h) output = self.dropout(output_g if output_g is not None else output_h, training=training) # Prepare outputs, we transpose back here to shape [bsz, len, hidden_dim] (cf. beginning of forward() method) output = tf.transpose(output, perm=(1, 0, 2)) if not use_mems: new_mems = None if output_hidden_states: if output_g is not None: hidden_states = tuple(tf.transpose(h, perm=(1, 0, 2)) for hs in hidden_states for h in hs) else: hidden_states = tuple(tf.transpose(hs, perm=(1, 0, 2)) for hs in hidden_states) if output_attentions: if target_mapping is not None: # when target_mapping is provided, there are 2-tuple of attentions attentions = tuple( tuple(tf.transpose(attn_stream, perm=(2, 3, 0, 1)) for attn_stream in t) for t in attentions ) else: attentions = tuple(tf.transpose(t, perm=(2, 3, 0, 1)) for t in attentions) if not return_dict: return tuple(v for v in [output, new_mems, hidden_states, attentions] if v is not None) return TFXLNetModelOutput( last_hidden_state=output, mems=new_mems, hidden_states=hidden_states, attentions=attentions ) class TFXLNetPreTrainedModel(TFPreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. """ config_class = XLNetConfig base_model_prefix = "transformer" @dataclass class TFXLNetModelOutput(ModelOutput): """ Output type of [`TFXLNetModel`]. 
Args: last_hidden_state (`tf.Tensor` of shape `(batch_size, num_predict, hidden_size)`): Sequence of hidden-states at the last layer of the model. `num_predict` corresponds to `target_mapping.shape[1]`. If `target_mapping` is `None`, then `num_predict` corresponds to `sequence_length`. mems (`List[tf.Tensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ last_hidden_state: tf.Tensor = None mems: Optional[List[tf.Tensor]] = None hidden_states: Optional[Tuple[tf.Tensor]] = None attentions: Optional[Tuple[tf.Tensor]] = None @dataclass class TFXLNetLMHeadModelOutput(ModelOutput): """ Output type of [`TFXLNetLMHeadModel`]. Args: loss (`tf.Tensor` of shape *(1,)*, *optional*, returned when `labels` is provided) Language modeling loss (for next-token prediction). logits (`tf.Tensor` of shape `(batch_size, num_predict, config.vocab_size)`): Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax). `num_predict` corresponds to `target_mapping.shape[1]`. 
If `target_mapping` is `None`, then `num_predict` corresponds to `sequence_length`. mems (`List[tf.Tensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[tf.Tensor] = None logits: tf.Tensor = None mems: Optional[List[tf.Tensor]] = None hidden_states: Optional[Tuple[tf.Tensor]] = None attentions: Optional[Tuple[tf.Tensor]] = None @dataclass class TFXLNetForSequenceClassificationOutput(ModelOutput): """ Output type of [`TFXLNetForSequenceClassification`]. Args: loss (`tf.Tensor` of shape `(1,)`, *optional*, returned when `label` is provided): Classification (or regression if config.num_labels==1) loss. logits (`tf.Tensor` of shape `(batch_size, config.num_labels)`): Classification (or regression if config.num_labels==1) scores (before SoftMax). mems (`List[tf.Tensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. 
The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[tf.Tensor] = None logits: tf.Tensor = None mems: Optional[List[tf.Tensor]] = None hidden_states: Optional[Tuple[tf.Tensor]] = None attentions: Optional[Tuple[tf.Tensor]] = None @dataclass class TFXLNetForTokenClassificationOutput(ModelOutput): """ Output type of [`TFXLNetForTokenClassificationOutput`]. Args: loss (`tf.Tensor` of shape `(1,)`, *optional*, returned when `labels` is provided) : Classification loss. logits (`tf.Tensor` of shape `(batch_size, sequence_length, config.num_labels)`): Classification scores (before SoftMax). mems (`List[tf.Tensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. 
hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[tf.Tensor] = None logits: tf.Tensor = None mems: Optional[List[tf.Tensor]] = None hidden_states: Optional[Tuple[tf.Tensor]] = None attentions: Optional[Tuple[tf.Tensor]] = None @dataclass class TFXLNetForMultipleChoiceOutput(ModelOutput): """ Output type of [`TFXLNetForMultipleChoice`]. Args: loss (`tf.Tensor` of shape *(1,)*, *optional*, returned when `labels` is provided): Classification loss. logits (`tf.Tensor` of shape `(batch_size, num_choices)`): *num_choices* is the second dimension of the input tensors. (see *input_ids* above). Classification scores (before SoftMax). mems (`List[tf.Tensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. 
Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[tf.Tensor] = None logits: tf.Tensor = None mems: Optional[List[tf.Tensor]] = None hidden_states: Optional[Tuple[tf.Tensor]] = None attentions: Optional[Tuple[tf.Tensor]] = None @dataclass class TFXLNetForQuestionAnsweringSimpleOutput(ModelOutput): """ Output type of [`TFXLNetForQuestionAnsweringSimple`]. Args: loss (`tf.Tensor` of shape `(1,)`, *optional*, returned when `labels` is provided): Total span extraction loss is the sum of a Cross-Entropy for the start and end positions. start_logits (`tf.Tensor` of shape `(batch_size, sequence_length,)`): Span-start scores (before SoftMax). end_logits (`tf.Tensor` of shape `(batch_size, sequence_length,)`): Span-end scores (before SoftMax). mems (`List[tf.Tensor]` of length `config.n_layers`): Contains pre-computed hidden-states. Can be used (see `mems` input) to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. 
attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[tf.Tensor] = None start_logits: tf.Tensor = None end_logits: tf.Tensor = None mems: Optional[List[tf.Tensor]] = None hidden_states: Optional[Tuple[tf.Tensor]] = None attentions: Optional[Tuple[tf.Tensor]] = None XLNET_START_DOCSTRING = r""" This model inherits from [`TFPreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.) This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. Use it as a regular TF 2.0 Keras Model and refer to the TF 2.0 documentation for all matter related to general usage and behavior. <Tip> TensorFlow models and layers in `transformers` accept two formats as input: - having all inputs as keyword arguments (like PyTorch models), or - having all inputs as a list, tuple or dict in the first positional argument. The reason the second format is supported is that Keras methods prefer this format when passing inputs to models and layers. Because of this support, when using methods like `model.fit()` things should "just work" for you - just pass your inputs and labels in any format that `model.fit()` supports! 
If, however, you want to use the second format outside of Keras methods like `fit()` and `predict()`, such as when creating your own layers or models with the Keras `Functional` API, there are three possibilities you can use to gather all the input Tensors in the first positional argument: - a single Tensor with `input_ids` only and nothing else: `model(input_ids)` - a list of varying length with one or several input Tensors IN THE ORDER given in the docstring: `model([input_ids, attention_mask])` or `model([input_ids, attention_mask, token_type_ids])` - a dictionary with one or several input Tensors associated to the input names given in the docstring: `model({"input_ids": input_ids, "token_type_ids": token_type_ids})` Note that when creating models and layers with [subclassing](https://keras.io/guides/making_new_layers_and_models_via_subclassing/) then you don't need to worry about any of this, as you can just pass inputs like you would to any other Python function! </Tip> Parameters: config ([`XLNetConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ XLNET_INPUTS_DOCSTRING = r""" Args: input_ids (`torch.LongTensor` of shape `({0})`): Indices of input sequence tokens in the vocabulary. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. 
[What are attention masks?](../glossary#attention-mask) mems (`List[torch.FloatTensor]` of length `config.n_layers`): Contains pre-computed hidden-states (see `mems` output below) . Can be used to speed up sequential decoding. The token ids which have their past given to this model should not be passed as `input_ids` as they have already been computed. `use_mems` has to be set to `True` to make use of `mems`. perm_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length, sequence_length)`, *optional*): Mask to indicate the attention pattern for each input token with values selected in `[0, 1]`: - if `perm_mask[k, i, j] = 0`, i attend to j in batch k; - if `perm_mask[k, i, j] = 1`, i does not attend to j in batch k. If not set, each token attends to all the others (full bidirectional attention). Only used during pretraining (to define factorization order) or for sequential decoding (generation). target_mapping (`torch.FloatTensor` of shape `(batch_size, num_predict, sequence_length)`, *optional*): Mask to indicate the output tokens to use. If `target_mapping[k, i, j] = 1`, the i-th predict in batch k is on the j-th token. Only used during pretraining for partial prediction or for sequential decoding (generation). token_type_ids (`torch.LongTensor` of shape `({0})`, *optional*): Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0, 1]`: - 0 corresponds to a *sentence A* token, - 1 corresponds to a *sentence B* token. [What are token type IDs?](../glossary#token-type-ids) input_mask (`torch.FloatTensor` of shape `{0}`, *optional*): Mask to avoid performing attention on padding token indices. Negative of `attention_mask`, i.e. with 0 for real tokens and 1 for padding which is kept for compatibility with the original code base. Mask values selected in `[0, 1]`: - 1 for tokens that are **masked**, - 0 for tokens that are **not masked**. You can only uses one of `input_mask` and `attention_mask`. 
head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. 
""" @add_start_docstrings( "The bare XLNet Model transformer outputting raw hidden-states without any specific head on top.", XLNET_START_DOCSTRING, ) class TFXLNetModel(TFXLNetPreTrainedModel): def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.transformer = TFXLNetMainLayer(config, name="transformer") @unpack_inputs @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TFXLNetModelOutput, config_class=_CONFIG_FOR_DOC, ) def call( self, input_ids: Optional[TFModelInputType] = None, attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, mems: Optional[Union[np.ndarray, tf.Tensor]] = None, perm_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, target_mapping: Optional[Union[np.ndarray, tf.Tensor]] = None, token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, input_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, training: bool = False, ) -> Union[TFXLNetModelOutput, Tuple[tf.Tensor]]: outputs = self.transformer( input_ids=input_ids, attention_mask=attention_mask, mems=mems, perm_mask=perm_mask, target_mapping=target_mapping, token_type_ids=token_type_ids, input_mask=input_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, use_mems=use_mems, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) return outputs def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None mems = 
tf.convert_to_tensor(output.mems) if output.mems is not None else None return TFXLNetModelOutput( last_hidden_state=output.last_hidden_state, mems=mems, hidden_states=hs, attentions=attns ) @add_start_docstrings( """ XLNet Model with a language modeling head on top (linear layer with weights tied to the input embeddings). """, XLNET_START_DOCSTRING, ) class TFXLNetLMHeadModel(TFXLNetPreTrainedModel, TFCausalLanguageModelingLoss): def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.transformer = TFXLNetMainLayer(config, name="transformer") self.lm_loss = TFXLNetLMHead(config, self.transformer.word_embedding, name="lm_loss") # generate fails to convert to a graph with XLNet self.supports_xla_generation = False def get_lm_head(self): return self.lm_loss def get_prefix_bias_name(self): warnings.warn("The method get_prefix_bias_name is deprecated. Please use `get_bias` instead.", FutureWarning) return self.name + "/" + self.lm_loss.name def prepare_inputs_for_generation(self, inputs, past_key_values=None, use_mems=None, **kwargs): # Add dummy token at the end (no attention on this one) effective_batch_size = inputs.shape[0] dummy_token = tf.zeros((effective_batch_size, 1), dtype=inputs.dtype) # At every pass, the attention values for the new token and the two last generated tokens # are computed, the rest is reloaded from the `past` cache. A purely auto-regressive model would have # offset = 1; offset = 2 seems to have slightly better computation. 
offset = 2 if past_key_values: input_ids = tf.concat([inputs[:, -offset:], dummy_token], axis=1) else: input_ids = tf.concat([inputs, dummy_token], axis=1) # Build permutation mask so that previous tokens don't see last token sequence_length = input_ids.shape[1] perm_mask = tf.zeros((effective_batch_size, sequence_length, sequence_length - 1)) perm_mask_seq_end = tf.ones((effective_batch_size, sequence_length, 1)) perm_mask = tf.concat([perm_mask, perm_mask_seq_end], axis=-1) # We'll only predict the last token target_mapping = tf.zeros((effective_batch_size, 1, sequence_length - 1)) target_mapping_seq_end = tf.ones((effective_batch_size, 1, 1)) target_mapping = tf.concat([target_mapping, target_mapping_seq_end], axis=-1) inputs = { "input_ids": input_ids, "perm_mask": perm_mask, "target_mapping": target_mapping, "use_mems": use_mems, } # if past is defined in model kwargs then use it for faster decoding if past_key_values: inputs["mems"] = tuple(layer_past[:-offset, :, :] for layer_past in past_key_values) return inputs @unpack_inputs @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @replace_return_docstrings(output_type=TFXLNetLMHeadModelOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_ids: Optional[TFModelInputType] = None, attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, mems: Optional[Union[np.ndarray, tf.Tensor]] = None, perm_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, target_mapping: Optional[Union[np.ndarray, tf.Tensor]] = None, token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, input_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, labels: Optional[Union[np.ndarray, tf.Tensor]] = None, 
training: bool = False, ) -> Union[TFXLNetLMHeadModelOutput, Tuple[tf.Tensor]]: r""" labels (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the cross entropy classification loss. Indices should be in `[0, ..., config.vocab_size - 1]`. Return: Examples: ```python >>> import tensorflow as tf >>> import numpy as np >>> from transformers import AutoTokenizer, TFXLNetLMHeadModel >>> tokenizer = AutoTokenizer.from_pretrained("xlnet-large-cased") >>> model = TFXLNetLMHeadModel.from_pretrained("xlnet-large-cased") >>> # We show how to setup inputs to predict a next token using a bi-directional context. >>> input_ids = tf.constant(tokenizer.encode("Hello, my dog is very <mask>", add_special_tokens=True))[ ... None, : ... ] # We will predict the masked token >>> perm_mask = np.zeros((1, input_ids.shape[1], input_ids.shape[1])) >>> perm_mask[:, :, -1] = 1.0 # Previous tokens don't see last token >>> target_mapping = np.zeros( ... (1, 1, input_ids.shape[1]) ... ) # Shape [1, 1, seq_length] => let's predict one token >>> target_mapping[ ... 0, 0, -1 ... ] = 1.0 # Our first (and only) prediction will be the last token of the sequence (the masked token) >>> outputs = model( ... input_ids, ... perm_mask=tf.constant(perm_mask, dtype=tf.float32), ... target_mapping=tf.constant(target_mapping, dtype=tf.float32), ... ) >>> next_token_logits = outputs[ ... 0 ... 
] # Output has shape [target_mapping.size(0), target_mapping.size(1), config.vocab_size] ```""" transformer_outputs = self.transformer( input_ids=input_ids, attention_mask=attention_mask, mems=mems, perm_mask=perm_mask, target_mapping=target_mapping, token_type_ids=token_type_ids, input_mask=input_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, use_mems=use_mems, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) hidden_state = transformer_outputs[0] logits = self.lm_loss(hidden_state, training=training) loss = None if labels is not None: loss = self.hf_compute_loss(labels, logits) if not return_dict: output = (logits,) + transformer_outputs[1:] return ((loss,) + output) if loss is not None else output return TFXLNetLMHeadModelOutput( loss=loss, logits=logits, mems=transformer_outputs.mems, hidden_states=transformer_outputs.hidden_states, attentions=transformer_outputs.attentions, ) def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None mems = tf.convert_to_tensor(output.mems) if output.mems is not None else None return TFXLNetLMHeadModelOutput(logits=output.logits, mems=mems, hidden_states=hs, attentions=attns) @add_start_docstrings( """ XLNet Model with a sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. for GLUE tasks. 
""", XLNET_START_DOCSTRING, ) class TFXLNetForSequenceClassification(TFXLNetPreTrainedModel, TFSequenceClassificationLoss): def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.num_labels = config.num_labels self.transformer = TFXLNetMainLayer(config, name="transformer") self.sequence_summary = TFSequenceSummary( config, initializer_range=config.initializer_range, name="sequence_summary" ) self.logits_proj = tf.keras.layers.Dense( config.num_labels, kernel_initializer=get_initializer(config.initializer_range), name="logits_proj" ) @unpack_inputs @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TFXLNetForSequenceClassificationOutput, config_class=_CONFIG_FOR_DOC, ) def call( self, input_ids: Optional[TFModelInputType] = None, attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, mems: Optional[Union[np.ndarray, tf.Tensor]] = None, perm_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, target_mapping: Optional[Union[np.ndarray, tf.Tensor]] = None, token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, input_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, labels: Optional[Union[np.ndarray, tf.Tensor]] = None, training: bool = False, ) -> Union[TFXLNetForSequenceClassificationOutput, Tuple[tf.Tensor]]: r""" labels (`tf.Tensor` of shape `(batch_size,)`, *optional*): Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. 
If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). """ transformer_outputs = self.transformer( input_ids=input_ids, attention_mask=attention_mask, mems=mems, perm_mask=perm_mask, target_mapping=target_mapping, token_type_ids=token_type_ids, input_mask=input_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, use_mems=use_mems, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) output = transformer_outputs[0] output = self.sequence_summary(output) logits = self.logits_proj(output) loss = None if labels is None else self.hf_compute_loss(labels, logits) if not return_dict: output = (logits,) + transformer_outputs[1:] return ((loss,) + output) if loss is not None else output return TFXLNetForSequenceClassificationOutput( loss=loss, logits=logits, mems=transformer_outputs.mems, hidden_states=transformer_outputs.hidden_states, attentions=transformer_outputs.attentions, ) def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None mems = tf.convert_to_tensor(output.mems) if output.mems is not None else None return TFXLNetForSequenceClassificationOutput( logits=output.logits, mems=mems, hidden_states=hs, attentions=attns ) @add_start_docstrings( """ XLNET Model with a multiple choice classification head on top (a linear layer on top of the pooled output and a softmax) e.g. for RocStories/SWAG tasks. 
""", XLNET_START_DOCSTRING, ) class TFXLNetForMultipleChoice(TFXLNetPreTrainedModel, TFMultipleChoiceLoss): def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.transformer = TFXLNetMainLayer(config, name="transformer") self.sequence_summary = TFSequenceSummary( config, initializer_range=config.initializer_range, name="sequence_summary" ) self.logits_proj = tf.keras.layers.Dense( 1, kernel_initializer=get_initializer(config.initializer_range), name="logits_proj" ) @property def dummy_inputs(self): """ Dummy inputs to build the network. Returns: tf.Tensor with dummy inputs """ return {"input_ids": tf.constant(MULTIPLE_CHOICE_DUMMY_INPUTS, dtype=tf.int32)} @unpack_inputs @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, num_choices, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TFXLNetForMultipleChoiceOutput, config_class=_CONFIG_FOR_DOC, ) def call( self, input_ids: Optional[TFModelInputType] = None, token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, input_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, mems: Optional[Union[np.ndarray, tf.Tensor]] = None, perm_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, target_mapping: Optional[Union[np.ndarray, tf.Tensor]] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, labels: Optional[Union[np.ndarray, tf.Tensor]] = None, training: bool = False, ) -> Union[TFXLNetForMultipleChoiceOutput, Tuple[tf.Tensor]]: r""" labels (`tf.Tensor` of shape `(batch_size,)`, *optional*): Labels for computing the multiple choice classification loss. 
Indices should be in `[0, ..., num_choices]` where `num_choices` is the size of the second dimension of the input tensors. (See `input_ids` above) """ if input_ids is not None: num_choices = shape_list(input_ids)[1] seq_length = shape_list(input_ids)[2] else: num_choices = shape_list(inputs_embeds)[1] seq_length = shape_list(inputs_embeds)[2] flat_input_ids = tf.reshape(input_ids, (-1, seq_length)) if input_ids is not None else None flat_attention_mask = tf.reshape(attention_mask, (-1, seq_length)) if attention_mask is not None else None flat_token_type_ids = tf.reshape(token_type_ids, (-1, seq_length)) if token_type_ids is not None else None flat_input_mask = tf.reshape(input_mask, (-1, seq_length)) if input_mask is not None else None flat_inputs_embeds = ( tf.reshape(inputs_embeds, (-1, seq_length, shape_list(inputs_embeds)[3])) if inputs_embeds is not None else None ) transformer_outputs = self.transformer( flat_input_ids, flat_attention_mask, mems, perm_mask, target_mapping, flat_token_type_ids, flat_input_mask, head_mask, flat_inputs_embeds, use_mems, output_attentions, output_hidden_states, return_dict=return_dict, training=training, ) output = transformer_outputs[0] logits = self.sequence_summary(output) logits = self.logits_proj(logits) reshaped_logits = tf.reshape(logits, (-1, num_choices)) loss = None if labels is None else self.hf_compute_loss(labels, reshaped_logits) if not return_dict: output = (reshaped_logits,) + transformer_outputs[1:] return ((loss,) + output) if loss is not None else output return TFXLNetForMultipleChoiceOutput( loss=loss, logits=reshaped_logits, mems=transformer_outputs.mems, hidden_states=transformer_outputs.hidden_states, attentions=transformer_outputs.attentions, ) @tf.function( input_signature=[ { "input_ids": tf.TensorSpec((None, None, None), tf.int32, name="input_ids"), "attention_mask": tf.TensorSpec((None, None, None), tf.int32, name="attention_mask"), "token_type_ids": tf.TensorSpec((None, None, None), tf.int32, 
name="token_type_ids"), } ] ) def serving(self, inputs): output = self.call(inputs) return self.serving_output(output) def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None mems = tf.convert_to_tensor(output.mems) if output.mems is not None else None return TFXLNetForMultipleChoiceOutput(logits=output.logits, mems=mems, hidden_states=hs, attentions=attns) @add_start_docstrings( """ XLNet Model with a token classification head on top (a linear layer on top of the hidden-states output) e.g. for Named-Entity-Recognition (NER) tasks. """, XLNET_START_DOCSTRING, ) class TFXLNetForTokenClassification(TFXLNetPreTrainedModel, TFTokenClassificationLoss): def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.num_labels = config.num_labels self.transformer = TFXLNetMainLayer(config, name="transformer") self.classifier = tf.keras.layers.Dense( config.num_labels, kernel_initializer=get_initializer(config.initializer_range), name="classifier" ) @unpack_inputs @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TFXLNetForTokenClassificationOutput, config_class=_CONFIG_FOR_DOC, ) def call( self, input_ids: Optional[TFModelInputType] = None, attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, mems: Optional[Union[np.ndarray, tf.Tensor]] = None, perm_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, target_mapping: Optional[Union[np.ndarray, tf.Tensor]] = None, token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, input_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None, use_mems: Optional[bool] = None, 
output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, labels: Optional[Union[np.ndarray, tf.Tensor]] = None, training: bool = False, ) -> Union[TFXLNetForTokenClassificationOutput, Tuple[tf.Tensor]]: r""" labels (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the token classification loss. Indices should be in `[0, ..., config.num_labels - 1]`. """ transformer_outputs = self.transformer( input_ids=input_ids, attention_mask=attention_mask, mems=mems, perm_mask=perm_mask, target_mapping=target_mapping, token_type_ids=token_type_ids, input_mask=input_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, use_mems=use_mems, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) output = transformer_outputs[0] logits = self.classifier(output) loss = None if labels is None else self.hf_compute_loss(labels, logits) if not return_dict: output = (logits,) + transformer_outputs[1:] return ((loss,) + output) if loss is not None else output return TFXLNetForTokenClassificationOutput( loss=loss, logits=logits, mems=transformer_outputs.mems, hidden_states=transformer_outputs.hidden_states, attentions=transformer_outputs.attentions, ) def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None mems = tf.convert_to_tensor(output.mems) if output.mems is not None else None return TFXLNetForTokenClassificationOutput(logits=output.logits, mems=mems, hidden_states=hs, attentions=attns) @add_start_docstrings( """ XLNet Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear layers on top of the hidden-states output to compute `span start logits` and `span end logits`). 
""", XLNET_START_DOCSTRING, ) class TFXLNetForQuestionAnsweringSimple(TFXLNetPreTrainedModel, TFQuestionAnsweringLoss): def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.transformer = TFXLNetMainLayer(config, name="transformer") self.qa_outputs = tf.keras.layers.Dense( config.num_labels, kernel_initializer=get_initializer(config.initializer_range), name="qa_outputs" ) @unpack_inputs @add_start_docstrings_to_model_forward(XLNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TFXLNetForQuestionAnsweringSimpleOutput, config_class=_CONFIG_FOR_DOC, ) def call( self, input_ids: Optional[TFModelInputType] = None, attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, mems: Optional[Union[np.ndarray, tf.Tensor]] = None, perm_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, target_mapping: Optional[Union[np.ndarray, tf.Tensor]] = None, token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, input_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None, use_mems: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, start_positions: Optional[Union[np.ndarray, tf.Tensor]] = None, end_positions: Optional[Union[np.ndarray, tf.Tensor]] = None, training: bool = False, ) -> Union[TFXLNetForQuestionAnsweringSimpleOutput, Tuple[tf.Tensor]]: r""" start_positions (`tf.Tensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the start of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence are not taken into account for computing the loss. 
end_positions (`tf.Tensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the end of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence are not taken into account for computing the loss. """ transformer_outputs = self.transformer( input_ids=input_ids, attention_mask=attention_mask, mems=mems, perm_mask=perm_mask, target_mapping=target_mapping, token_type_ids=token_type_ids, input_mask=input_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, use_mems=use_mems, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) sequence_output = transformer_outputs[0] logits = self.qa_outputs(sequence_output) start_logits, end_logits = tf.split(logits, 2, axis=-1) start_logits = tf.squeeze(start_logits, axis=-1) end_logits = tf.squeeze(end_logits, axis=-1) loss = None if start_positions is not None and end_positions is not None: labels = {"start_position": start_positions} labels["end_position"] = end_positions loss = self.hf_compute_loss(labels, (start_logits, end_logits)) if not return_dict: output = (start_logits, end_logits) + transformer_outputs[1:] return ((loss,) + output) if loss is not None else output return TFXLNetForQuestionAnsweringSimpleOutput( loss=loss, start_logits=start_logits, end_logits=end_logits, mems=transformer_outputs.mems, hidden_states=transformer_outputs.hidden_states, attentions=transformer_outputs.attentions, ) def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None mems = tf.convert_to_tensor(output.mems) if output.mems is not None else None return TFXLNetForQuestionAnsweringSimpleOutput( start_logits=output.start_logits, end_logits=output.end_logits, mems=mems, 
hidden_states=hs, attentions=attns, )
27182812/ChatGLM-LLaMA-chinese-insturct
11,143
src/transformers/models/xlnet/configuration_xlnet.py
# coding=utf-8 # Copyright 2018 Google AI, Google Brain and Carnegie Mellon University Authors and the HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ XLNet configuration""" import warnings from ...configuration_utils import PretrainedConfig from ...utils import logging logger = logging.get_logger(__name__) XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP = { "xlnet-base-cased": "https://huggingface.co/xlnet-base-cased/resolve/main/config.json", "xlnet-large-cased": "https://huggingface.co/xlnet-large-cased/resolve/main/config.json", } class XLNetConfig(PretrainedConfig): """ This is the configuration class to store the configuration of a [`XLNetModel`] or a [`TFXLNetModel`]. It is used to instantiate a XLNet model according to the specified arguments, defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of the [xlnet-large-cased](https://huggingface.co/xlnet-large-cased) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Args: vocab_size (`int`, *optional*, defaults to 32000): Vocabulary size of the XLNet model. Defines the number of different tokens that can be represented by the `inputs_ids` passed when calling [`XLNetModel`] or [`TFXLNetModel`]. 
d_model (`int`, *optional*, defaults to 1024): Dimensionality of the encoder layers and the pooler layer. n_layer (`int`, *optional*, defaults to 24): Number of hidden layers in the Transformer encoder. n_head (`int`, *optional*, defaults to 16): Number of attention heads for each attention layer in the Transformer encoder. d_inner (`int`, *optional*, defaults to 4096): Dimensionality of the "intermediate" (often named feed-forward) layer in the Transformer encoder. ff_activation (`str` or `Callable`, *optional*, defaults to `"gelu"`): The non-linear activation function (function or string) in the If string, `"gelu"`, `"relu"`, `"silu"` and `"gelu_new"` are supported. untie_r (`bool`, *optional*, defaults to `True`): Whether or not to untie relative position biases attn_type (`str`, *optional*, defaults to `"bi"`): The attention type used by the model. Set `"bi"` for XLNet, `"uni"` for Transformer-XL. initializer_range (`float`, *optional*, defaults to 0.02): The standard deviation of the truncated_normal_initializer for initializing all weight matrices. layer_norm_eps (`float`, *optional*, defaults to 1e-12): The epsilon used by the layer normalization layers. dropout (`float`, *optional*, defaults to 0.1): The dropout probability for all fully connected layers in the embeddings, encoder, and pooler. mem_len (`int` or `None`, *optional*): The number of tokens to cache. The key/value pairs that have already been pre-computed in a previous forward pass won't be re-computed. See the [quickstart](https://huggingface.co/transformers/quickstart.html#using-the-past) for more information. reuse_len (`int`, *optional*): The number of tokens in the current batch to be cached and reused in the future. bi_data (`bool`, *optional*, defaults to `False`): Whether or not to use bidirectional input pipeline. Usually set to `True` during pretraining and `False` during finetuning. clamp_len (`int`, *optional*, defaults to -1): Clamp all relative distances larger than clamp_len. 
Setting this attribute to -1 means no clamping. same_length (`bool`, *optional*, defaults to `False`): Whether or not to use the same attention length for each token. summary_type (`str`, *optional*, defaults to "last"): Argument used when doing sequence summary. Used in the sequence classification and multiple choice models. Has to be one of the following options: - `"last"`: Take the last token hidden state (like XLNet). - `"first"`: Take the first token hidden state (like BERT). - `"mean"`: Take the mean of all tokens hidden states. - `"cls_index"`: Supply a Tensor of classification token position (like GPT/GPT-2). - `"attn"`: Not implemented now, use multi-head attention. summary_use_proj (`bool`, *optional*, defaults to `True`): Argument used when doing sequence summary. Used in the sequence classification and multiple choice models. Whether or not to add a projection after the vector extraction. summary_activation (`str`, *optional*): Argument used when doing sequence summary. Used in the sequence classification and multiple choice models. Pass `"tanh"` for a tanh activation to the output, any other value will result in no activation. summary_proj_to_labels (`boo`, *optional*, defaults to `True`): Used in the sequence classification and multiple choice models. Whether the projection outputs should have `config.num_labels` or `config.hidden_size` classes. summary_last_dropout (`float`, *optional*, defaults to 0.1): Used in the sequence classification and multiple choice models. The dropout ratio to be used after the projection and activation. start_n_top (`int`, *optional*, defaults to 5): Used in the SQuAD evaluation script. end_n_top (`int`, *optional*, defaults to 5): Used in the SQuAD evaluation script. use_mems_eval (`bool`, *optional*, defaults to `True`): Whether or not the model should make use of the recurrent memory mechanism in evaluation mode. 
use_mems_train (`bool`, *optional*, defaults to `False`): Whether or not the model should make use of the recurrent memory mechanism in train mode. <Tip> For pretraining, it is recommended to set `use_mems_train` to `True`. For fine-tuning, it is recommended to set `use_mems_train` to `False` as discussed [here](https://github.com/zihangdai/xlnet/issues/41#issuecomment-505102587). If `use_mems_train` is set to `True`, one has to make sure that the train batches are correctly pre-processed, *e.g.* `batch_1 = [[This line is], [This is the]]` and `batch_2 = [[ the first line], [ second line]]` and that all batches are of equal size. </Tip> Examples: ```python >>> from transformers import XLNetConfig, XLNetModel >>> # Initializing a XLNet configuration >>> configuration = XLNetConfig() >>> # Initializing a model (with random weights) from the configuration >>> model = XLNetModel(configuration) >>> # Accessing the model configuration >>> configuration = model.config ```""" model_type = "xlnet" keys_to_ignore_at_inference = ["mems"] attribute_map = { "n_token": "vocab_size", # Backward compatibility "hidden_size": "d_model", "num_attention_heads": "n_head", "num_hidden_layers": "n_layer", } def __init__( self, vocab_size=32000, d_model=1024, n_layer=24, n_head=16, d_inner=4096, ff_activation="gelu", untie_r=True, attn_type="bi", initializer_range=0.02, layer_norm_eps=1e-12, dropout=0.1, mem_len=512, reuse_len=None, use_mems_eval=True, use_mems_train=False, bi_data=False, clamp_len=-1, same_length=False, summary_type="last", summary_use_proj=True, summary_activation="tanh", summary_last_dropout=0.1, start_n_top=5, end_n_top=5, pad_token_id=5, bos_token_id=1, eos_token_id=2, **kwargs, ): """Constructs XLNetConfig.""" self.vocab_size = vocab_size self.d_model = d_model self.n_layer = n_layer self.n_head = n_head if d_model % n_head != 0: raise ValueError(f"'d_model % n_head' ({d_model % n_head}) should be equal to 0") if "d_head" in kwargs: if kwargs["d_head"] != d_model // 
n_head: raise ValueError( f"`d_head` ({kwargs['d_head']}) should be equal to `d_model // n_head` ({d_model // n_head})" ) self.d_head = d_model // n_head self.ff_activation = ff_activation self.d_inner = d_inner self.untie_r = untie_r self.attn_type = attn_type self.initializer_range = initializer_range self.layer_norm_eps = layer_norm_eps self.dropout = dropout self.mem_len = mem_len self.reuse_len = reuse_len self.bi_data = bi_data self.clamp_len = clamp_len self.same_length = same_length self.summary_type = summary_type self.summary_use_proj = summary_use_proj self.summary_activation = summary_activation self.summary_last_dropout = summary_last_dropout self.start_n_top = start_n_top self.end_n_top = end_n_top self.bos_token_id = bos_token_id self.pad_token_id = pad_token_id self.eos_token_id = eos_token_id if "use_cache" in kwargs: warnings.warn( "The `use_cache` argument is deprecated and will be removed in a future version, use `use_mems_eval`" " instead.", FutureWarning, ) use_mems_eval = kwargs["use_cache"] self.use_mems_eval = use_mems_eval self.use_mems_train = use_mems_train super().__init__(pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id, **kwargs) @property def max_position_embeddings(self): logger.info(f"The model {self.model_type} is one of the few models that has no sequence length limit.") return -1 @max_position_embeddings.setter def max_position_embeddings(self, value): # Message copied from Transformer-XL documentation raise NotImplementedError( f"The model {self.model_type} is one of the few models that has no sequence length limit." )
27182812/ChatGLM-LLaMA-chinese-insturct
4,933
src/transformers/models/data2vec/__init__.py
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available _import_structure = { "configuration_data2vec_audio": ["DATA2VEC_AUDIO_PRETRAINED_CONFIG_ARCHIVE_MAP", "Data2VecAudioConfig"], "configuration_data2vec_text": [ "DATA2VEC_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP", "Data2VecTextConfig", "Data2VecTextOnnxConfig", ], "configuration_data2vec_vision": [ "DATA2VEC_VISION_PRETRAINED_CONFIG_ARCHIVE_MAP", "Data2VecVisionConfig", "Data2VecVisionOnnxConfig", ], } try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_data2vec_audio"] = [ "DATA2VEC_AUDIO_PRETRAINED_MODEL_ARCHIVE_LIST", "Data2VecAudioForAudioFrameClassification", "Data2VecAudioForCTC", "Data2VecAudioForSequenceClassification", "Data2VecAudioForXVector", "Data2VecAudioModel", "Data2VecAudioPreTrainedModel", ] _import_structure["modeling_data2vec_text"] = [ "DATA2VEC_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST", "Data2VecTextForCausalLM", "Data2VecTextForMaskedLM", "Data2VecTextForMultipleChoice", "Data2VecTextForQuestionAnswering", "Data2VecTextForSequenceClassification", "Data2VecTextForTokenClassification", "Data2VecTextModel", "Data2VecTextPreTrainedModel", ] _import_structure["modeling_data2vec_vision"] = [ "DATA2VEC_VISION_PRETRAINED_MODEL_ARCHIVE_LIST", 
"Data2VecVisionForImageClassification", "Data2VecVisionForMaskedImageModeling", "Data2VecVisionForSemanticSegmentation", "Data2VecVisionModel", "Data2VecVisionPreTrainedModel", ] if is_tf_available(): _import_structure["modeling_tf_data2vec_vision"] = [ "TFData2VecVisionForImageClassification", "TFData2VecVisionForSemanticSegmentation", "TFData2VecVisionModel", "TFData2VecVisionPreTrainedModel", ] if TYPE_CHECKING: from .configuration_data2vec_audio import DATA2VEC_AUDIO_PRETRAINED_CONFIG_ARCHIVE_MAP, Data2VecAudioConfig from .configuration_data2vec_text import ( DATA2VEC_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP, Data2VecTextConfig, Data2VecTextOnnxConfig, ) from .configuration_data2vec_vision import ( DATA2VEC_VISION_PRETRAINED_CONFIG_ARCHIVE_MAP, Data2VecVisionConfig, Data2VecVisionOnnxConfig, ) try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_data2vec_audio import ( DATA2VEC_AUDIO_PRETRAINED_MODEL_ARCHIVE_LIST, Data2VecAudioForAudioFrameClassification, Data2VecAudioForCTC, Data2VecAudioForSequenceClassification, Data2VecAudioForXVector, Data2VecAudioModel, Data2VecAudioPreTrainedModel, ) from .modeling_data2vec_text import ( DATA2VEC_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST, Data2VecTextForCausalLM, Data2VecTextForMaskedLM, Data2VecTextForMultipleChoice, Data2VecTextForQuestionAnswering, Data2VecTextForSequenceClassification, Data2VecTextForTokenClassification, Data2VecTextModel, Data2VecTextPreTrainedModel, ) from .modeling_data2vec_vision import ( DATA2VEC_VISION_PRETRAINED_MODEL_ARCHIVE_LIST, Data2VecVisionForImageClassification, Data2VecVisionForMaskedImageModeling, Data2VecVisionForSemanticSegmentation, Data2VecVisionModel, Data2VecVisionPreTrainedModel, ) if is_tf_available(): from .modeling_tf_data2vec_vision import ( TFData2VecVisionForImageClassification, TFData2VecVisionForSemanticSegmentation, TFData2VecVisionModel, TFData2VecVisionPreTrainedModel, ) else: import sys 
sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
15,344
src/transformers/models/data2vec/convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.py
#!/usr/bin/env python3 import argparse import json import torch from huggingface_hub import hf_hub_download from PIL import Image from timm.models import create_model from transformers import ( BeitFeatureExtractor, Data2VecVisionConfig, Data2VecVisionForImageClassification, Data2VecVisionModel, ) def create_rename_keys(config, has_lm_head=False, is_semantic=False, hf_prefix="data2vec."): prefix = "backbone." if is_semantic else "" rename_keys = [] for i in range(config.num_hidden_layers): # encoder layers: output projection, 2 feedforward neural networks and 2 layernorms rename_keys.append( (f"{prefix}blocks.{i}.norm1.weight", f"{hf_prefix}encoder.layer.{i}.layernorm_before.weight") ) rename_keys.append((f"{prefix}blocks.{i}.norm1.bias", f"{hf_prefix}encoder.layer.{i}.layernorm_before.bias")) rename_keys.append( (f"{prefix}blocks.{i}.attn.proj.weight", f"{hf_prefix}encoder.layer.{i}.attention.output.dense.weight") ) rename_keys.append( (f"{prefix}blocks.{i}.attn.proj.bias", f"{hf_prefix}encoder.layer.{i}.attention.output.dense.bias") ) rename_keys.append( (f"{prefix}blocks.{i}.norm2.weight", f"{hf_prefix}encoder.layer.{i}.layernorm_after.weight") ) rename_keys.append((f"{prefix}blocks.{i}.norm2.bias", f"{hf_prefix}encoder.layer.{i}.layernorm_after.bias")) rename_keys.append( (f"{prefix}blocks.{i}.mlp.fc1.weight", f"{hf_prefix}encoder.layer.{i}.intermediate.dense.weight") ) rename_keys.append( (f"{prefix}blocks.{i}.mlp.fc1.bias", f"{hf_prefix}encoder.layer.{i}.intermediate.dense.bias") ) rename_keys.append((f"{prefix}blocks.{i}.mlp.fc2.weight", f"{hf_prefix}encoder.layer.{i}.output.dense.weight")) rename_keys.append((f"{prefix}blocks.{i}.mlp.fc2.bias", f"{hf_prefix}encoder.layer.{i}.output.dense.bias")) # projection layer + position embeddings rename_keys.extend( [ (f"{prefix}cls_token", f"{hf_prefix}embeddings.cls_token"), (f"{prefix}patch_embed.proj.weight", f"{hf_prefix}embeddings.patch_embeddings.projection.weight"), (f"{prefix}patch_embed.proj.bias", 
f"{hf_prefix}embeddings.patch_embeddings.projection.bias"), ] ) if has_lm_head: # mask token + shared relative position bias + layernorm rename_keys.extend( [ ("mask_token", f"{hf_prefix}embeddings.mask_token"), ( "rel_pos_bias.relative_position_bias_table", f"{hf_prefix}encoder.relative_position_bias.relative_position_bias_table", ), ( "rel_pos_bias.relative_position_index", f"{hf_prefix}encoder.relative_position_bias.relative_position_index", ), ("norm.weight", "layernorm.weight"), ("norm.bias", "layernorm.bias"), ] ) elif is_semantic: # semantic segmentation classification heads rename_keys.extend( [ ("decode_head.conv_seg.weight", "decode_head.classifier.weight"), ("decode_head.conv_seg.bias", "decode_head.classifier.bias"), ("auxiliary_head.conv_seg.weight", "auxiliary_head.classifier.weight"), ("auxiliary_head.conv_seg.bias", "auxiliary_head.classifier.bias"), ] ) else: # layernorm + classification head rename_keys.extend( [ ("fc_norm.weight", f"{hf_prefix}pooler.layernorm.weight"), ("fc_norm.bias", f"{hf_prefix}pooler.layernorm.bias"), ("head.weight", "classifier.weight"), ("head.bias", "classifier.bias"), ] ) return rename_keys def read_in_q_k_v(state_dict, config, has_lm_head=False, is_semantic=False, hf_prefix="data2vec_vision."): for i in range(config.num_hidden_layers): prefix = "backbone." 
if is_semantic else "" # queries, keys and values in_proj_weight = state_dict.pop(f"{prefix}blocks.{i}.attn.qkv.weight") q_bias = state_dict.pop(f"{prefix}blocks.{i}.attn.q_bias") v_bias = state_dict.pop(f"{prefix}blocks.{i}.attn.v_bias") state_dict[f"{hf_prefix}encoder.layer.{i}.attention.attention.query.weight"] = in_proj_weight[ : config.hidden_size, : ] state_dict[f"{hf_prefix}encoder.layer.{i}.attention.attention.query.bias"] = q_bias state_dict[f"{hf_prefix}encoder.layer.{i}.attention.attention.key.weight"] = in_proj_weight[ config.hidden_size : config.hidden_size * 2, : ] state_dict[f"{hf_prefix}encoder.layer.{i}.attention.attention.value.weight"] = in_proj_weight[ -config.hidden_size :, : ] state_dict[f"{hf_prefix}encoder.layer.{i}.attention.attention.value.bias"] = v_bias # gamma_1 and gamma_2 # we call them lambda because otherwise they are renamed when using .from_pretrained gamma_1 = state_dict.pop(f"{prefix}blocks.{i}.gamma_1") gamma_2 = state_dict.pop(f"{prefix}blocks.{i}.gamma_2") state_dict[f"{hf_prefix}encoder.layer.{i}.lambda_1"] = gamma_1 state_dict[f"{hf_prefix}encoder.layer.{i}.lambda_2"] = gamma_2 # relative_position bias table + index if not has_lm_head: # each layer has its own relative position bias table = state_dict.pop(f"{prefix}blocks.{i}.attn.relative_position_bias_table") index = state_dict.pop(f"{prefix}blocks.{i}.attn.relative_position_index") state_dict[ f"{hf_prefix}encoder.layer.{i}.attention.attention.relative_position_bias.relative_position_bias_table" ] = table state_dict[ f"{hf_prefix}encoder.layer.{i}.attention.attention.relative_position_bias.relative_position_index" ] = index def get_args(): parser = argparse.ArgumentParser( "Convert Data2VecVision to HF for image classification and pretraining", add_help=False ) parser.add_argument("--hf_checkpoint_name", type=str) parser.add_argument("--input_size", default=224, type=int, help="images input size") parser.add_argument("--beit_checkpoint", default="", help="beit 
checkpoint") return parser.parse_args() def load_beit_model(args, is_finetuned, is_large): def load_state_dict(model, state_dict, prefix="", ignore_missing="relative_position_index"): missing_keys = [] unexpected_keys = [] error_msgs = [] # copy state_dict so _load_from_state_dict can modify it metadata = getattr(state_dict, "_metadata", None) state_dict = state_dict.copy() if metadata is not None: state_dict._metadata = metadata def load(module, prefix=""): local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {}) module._load_from_state_dict( state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs ) for name, child in module._modules.items(): if child is not None: load(child, prefix + name + ".") load(model, prefix=prefix) warn_missing_keys = [] ignore_missing_keys = [] for key in missing_keys: keep_flag = True for ignore_key in ignore_missing.split("|"): if ignore_key in key: keep_flag = False break if keep_flag: warn_missing_keys.append(key) else: ignore_missing_keys.append(key) missing_keys = warn_missing_keys if len(missing_keys) > 0: print( "Weights of {} not initialized from pretrained model: {}".format( model.__class__.__name__, missing_keys ) ) if len(unexpected_keys) > 0: print("Weights from pretrained model not used in {}: {}".format(model.__class__.__name__, unexpected_keys)) if len(ignore_missing_keys) > 0: print( "Ignored weights of {} not initialized from pretrained model: {}".format( model.__class__.__name__, ignore_missing_keys ) ) if len(error_msgs) > 0: print("\n".join(error_msgs)) model_kwargs = { "pretrained": False, "use_shared_rel_pos_bias": True, "use_abs_pos_emb": False, "init_values": 0.1, } if is_finetuned: model_kwargs.update( { "num_classes": 1000, "use_mean_pooling": True, "init_scale": 0.001, "use_rel_pos_bias": True, } ) model = create_model( "beit_large_patch16_224" if is_large else "beit_base_patch16_224", **model_kwargs, ) patch_size = model.patch_embed.patch_size args.window_size = 
(args.input_size // patch_size[0], args.input_size // patch_size[1]) checkpoint = torch.load(args.beit_checkpoint, map_location="cpu") print(f"Load ckpt from {args.beit_checkpoint}") checkpoint_model = None for model_key in ("model", "module"): if model_key in checkpoint: checkpoint_model = checkpoint[model_key] print(f"Load state_dict by model_key = {model_key}") break all_keys = list(checkpoint_model.keys()) for key in all_keys: if "relative_position_index" in key: checkpoint_model.pop(key) if "relative_position_bias_table" in key: rel_pos_bias = checkpoint_model[key] src_num_pos, num_attn_heads = rel_pos_bias.size() dst_num_pos, _ = model.state_dict()[key].size() dst_patch_shape = model.patch_embed.patch_shape if dst_patch_shape[0] != dst_patch_shape[1]: raise NotImplementedError() load_state_dict(model, checkpoint_model, prefix="") return model def main(): args = get_args() is_finetuned = "ft1k" in args.hf_checkpoint_name is_large = "large" in args.hf_checkpoint_name if is_finetuned: # To convert Beit's data2vec_vision to HF you need to copy # https://github.com/facebookresearch/data2vec_vision/blob/main/beit/modeling_finetune.py # into this folder. import modeling_finetune # noqa: F401 else: # To convert Beit's data2vec_vision to HF you need to copy # https://github.com/facebookresearch/data2vec_vision/blob/main/beit/modeling_cyclical.py # into this folder # IMPORTANT: Note that for now we've only converted the down-stream # model and not the full pretrained model. This means for the integration # test you need to add a `return x` after the following line: # https://github.com/facebookresearch/data2vec_vision/blob/af9a36349aaed59ae66e69b5dabeef2d62fdc5da/beit/modeling_cyclical.py#L197 # to make the integration test pass. import modeling_cyclical # noqa: F401 # 1. 
Create model config config = Data2VecVisionConfig() if is_finetuned: config.use_relative_position_bias = True config.use_shared_relative_position_bias = False config.use_mean_pooling = True config.num_labels = 1000 repo_id = "huggingface/label-files" filename = "imagenet-1k-id2label.json" id2label = json.load(open(hf_hub_download(repo_id, filename, repo_type="dataset"), "r")) id2label = {int(k): v for k, v in id2label.items()} config.id2label = id2label config.label2id = {v: k for k, v in id2label.items()} else: config.use_relative_position_bias = False config.use_shared_relative_position_bias = True config.use_mean_pooling = False if is_large: config.hidden_size = 1024 config.intermediate_size = 4096 config.num_hidden_layers = 24 config.num_attention_heads = 16 # 2. Load Beit model orig_model = load_beit_model(args, is_finetuned, is_large) orig_model.eval() # 3. Forward Beit model feature_extractor = BeitFeatureExtractor(size=config.image_size, do_center_crop=False) image = Image.open("../../../../tests/fixtures/tests_samples/COCO/000000039769.png") encoding = feature_extractor(images=image, return_tensors="pt") pixel_values = encoding["pixel_values"] orig_args = (pixel_values,) if is_finetuned else (pixel_values, None) with torch.no_grad(): orig_model_output = orig_model(*orig_args) # 4. Load HF Data2VecVision model if is_finetuned: hf_model = Data2VecVisionForImageClassification(config) hf_model.eval() has_lm_head = False hf_prefix = "data2vec_vision." 
else: hf_model = Data2VecVisionModel(config) hf_model.eval() has_lm_head = True hf_prefix = "" rename_keys = create_rename_keys(config, hf_prefix=hf_prefix, has_lm_head=has_lm_head) state_dict = orig_model.state_dict() for src, dest in rename_keys: val = state_dict.pop(src) state_dict[dest] = val read_in_q_k_v(state_dict, config, hf_prefix=hf_prefix, has_lm_head=has_lm_head) missing_keys, unexpected_keys = hf_model.load_state_dict(state_dict, strict=False) print("HF missing", missing_keys) print("HF unexpected_keys", unexpected_keys) # 5. Forward HF Data2VecVision model with torch.no_grad(): hf_model_output = hf_model(pixel_values) hf_output = hf_model_output.logits if is_finetuned else hf_model_output.last_hidden_state # 6. Compare max_absolute_diff = torch.max(torch.abs(hf_output - orig_model_output)).item() print(f"max_absolute_diff = {max_absolute_diff}") success = torch.allclose(hf_output, orig_model_output, atol=1e-3) print("Do both models output the same tensors?", "🔥" if success else "💩") if not success: raise Exception("Something went wRoNg") # 7. 
Save print(f"Saving to {args.hf_checkpoint_name}") hf_model.save_pretrained(args.hf_checkpoint_name) feature_extractor.save_pretrained(args.hf_checkpoint_name) if __name__ == "__main__": main() # Run the following to convert checkpoints # python ./convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.py \ # --beit_checkpoint ./pretrained_base.pt \ # --hf_checkpoint_name "./data2vec-vision-base" # python ./convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.py \ # --beit_checkpoint ./finetuned_base.pt \ # --hf_checkpoint_name "./data2vec-vision-base-ft1k" # python ./convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.py \ # --beit_checkpoint ./pretrained_large.pt \ # --hf_checkpoint_name "./data2vec-vision-large" # python ./convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.py \ # --beit_checkpoint ./finetuned_large.pt \ # --hf_checkpoint_name "./data2vec-vision-large-ft1k"
27182812/ChatGLM-LLaMA-chinese-insturct
10,852
src/transformers/models/data2vec/convert_data2vec_audio_original_pytorch_checkpoint_to_pytorch.py
# coding=utf-8 # Copyright 2021 The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Convert Wav2Vec2 checkpoint.""" import argparse import os from functools import reduce import fairseq import torch from datasets import load_dataset from transformers import Wav2Vec2Processor, logging from transformers.models.data2vec.configuration_data2vec_audio import Data2VecAudioConfig # Copied from https://github.com/pytorch/fairseq/blob/main/examples/data2vec/models/data2vec_audio.py from transformers.models.data2vec.data2vec_audio import Data2VecAudioModel as Dummy # noqa: F401 from transformers.models.data2vec.modeling_data2vec_audio import Data2VecAudioForCTC, Data2VecAudioModel logging.set_verbosity_info() logger = logging.get_logger(__name__) MAPPING = { "post_extract_proj": "feature_projection.projection", "models.0.layer_norm": "feature_projection.layer_norm", "self_attn.k_proj": "encoder.layers.*.attention.k_proj", "self_attn.v_proj": "encoder.layers.*.attention.v_proj", "self_attn.q_proj": "encoder.layers.*.attention.q_proj", "self_attn.out_proj": "encoder.layers.*.attention.out_proj", "self_attn_layer_norm": "encoder.layers.*.layer_norm", "fc1": "encoder.layers.*.feed_forward.intermediate_dense", "fc2": "encoder.layers.*.feed_forward.output_dense", "final_layer_norm": "encoder.layers.*.final_layer_norm", "encoder.layer_norm": "encoder.layer_norm", "w2v_model.layer_norm": "feature_projection.layer_norm", "w2v_encoder.proj": "lm_head", "mask_emb": 
"masked_spec_embed", } TOP_LEVEL_KEYS = [ "lm_head", ] def set_recursively(hf_pointer, key, value, full_name, weight_type): for attribute in key.split("."): hf_pointer = getattr(hf_pointer, attribute) if weight_type is not None: hf_shape = getattr(hf_pointer, weight_type).shape else: hf_shape = hf_pointer.shape if hf_shape != value.shape: raise ValueError( f"Shape of hf {key + '.' + weight_type if weight_type is not None else ''} is {hf_shape}, but should be" f" {value.shape} for {full_name}" ) if weight_type == "weight": hf_pointer.weight.data = value elif weight_type == "weight_g": hf_pointer.weight_g.data = value elif weight_type == "weight_v": hf_pointer.weight_v.data = value elif weight_type == "bias": hf_pointer.bias.data = value else: hf_pointer.data = value logger.info(f"{key + '.' + weight_type if weight_type is not None else ''} was initialized from {full_name}.") def recursively_load_weights(fairseq_model, hf_model, is_headless): unused_weights = [] fairseq_dict = fairseq_model.state_dict() if not is_headless: feature_extractor = hf_model.data2vec_audio.feature_extractor pos_conv_embedding = hf_model.data2vec_audio.encoder.pos_conv_embed else: feature_extractor = hf_model.feature_extractor pos_conv_embedding = hf_model.encoder.pos_conv_embed for name, value in fairseq_dict.items(): is_used = False if "conv_layers" in name: load_conv_layer( name, value, feature_extractor, unused_weights, ) is_used = True elif "pos_conv" in name: load_pos_conv_layer( name, value, pos_conv_embedding, unused_weights, ) is_used = True else: for key, mapped_key in MAPPING.items(): if not is_headless: mapped_key = "data2vec_audio." 
+ mapped_key if mapped_key not in TOP_LEVEL_KEYS else mapped_key if key in name or key.split("w2v_model.")[-1] == name.split(".")[0]: is_used = True if "*" in mapped_key: layer_index = name.split(key)[0].split(".")[-2] mapped_key = mapped_key.replace("*", layer_index) if "weight_g" in name: weight_type = "weight_g" elif "weight_v" in name: weight_type = "weight_v" elif "bias" in name: weight_type = "bias" elif "weight" in name: # TODO: don't match quantizer.weight_proj weight_type = "weight" else: weight_type = None set_recursively(hf_model, mapped_key, value, name, weight_type) continue if not is_used: unused_weights.append(name) logger.warning(f"Unused weights: {unused_weights}") def access_by_string(module, path): names = path.split(".") return reduce(getattr, names, module) def set_weights(full_name, module, fsq_value, hf_weight_path): hf_weight = access_by_string(module, hf_weight_path) hf_value = hf_weight.data if fsq_value.shape != hf_value.shape: raise ValueError(f"{full_name} has size {fsq_value.shape}, but {hf_value.shape} was found.") hf_weight.data = fsq_value logger.info(f"{full_name} was correctly initialized from {hf_weight_path}.") def load_conv_layer(full_name, value, feature_extractor, unused_weights): name = full_name.split("conv_layers.")[-1] items = name.split(".") layer_id = int(items[0]) type_id = int(items[1]) weight_type = name.split(".")[-1] if type_id == 0: layer_type = "conv" elif type_id == 2: layer_type = "layer_norm" else: unused_weights.append(full_name) return set_weights(full_name, feature_extractor, value, f"conv_layers.{layer_id}.{layer_type}.{weight_type}") def load_pos_conv_layer(full_name, value, pos_conv_embeddings, unused_weights): name = full_name.split("pos_conv.")[-1] items = name.split(".") layer_id = int(items[0]) type_id = int(items[1]) weight_type = name.split(".")[-1] if type_id != 0: unused_weights.append(full_name) return else: layer_type = "conv" set_weights(full_name, pos_conv_embeddings, value, 
f"layers.{layer_id}.{layer_type}.{weight_type}") @torch.no_grad() def convert_wav2vec2_checkpoint( checkpoint_path, pytorch_dump_folder_path, config_path=None, dict_path=None, is_finetuned=True ): """ Copy/paste/tweak model's weights to transformers design. """ if config_path is not None: config = Data2VecAudioConfig.from_pretrained(config_path) else: config = Data2VecAudioConfig() if not is_finetuned: # Modify final_proj layer name hf_wav2vec = Data2VecAudioModel(config) data2vec_checkpoint_dir = os.path.dirname(checkpoint_path) state_dict = torch.load(checkpoint_path) state_dict["model"]["final_proj.weight"] = state_dict["model"].pop("final_proj.0.weight") state_dict["model"]["final_proj.bias"] = state_dict["model"].pop("final_proj.0.bias") converted_ckpt = os.path.join(data2vec_checkpoint_dir, "converted.pt") torch.save(state_dict, converted_ckpt) else: hf_wav2vec = Data2VecAudioForCTC(config) converted_ckpt = checkpoint_path def load_data2vec(path): model, _, _ = fairseq.checkpoint_utils.load_model_ensemble_and_task([path]) return model[0].eval() model = load_data2vec(converted_ckpt) recursively_load_weights(model, hf_wav2vec, not is_finetuned) processor = Wav2Vec2Processor.from_pretrained("facebook/wav2vec2-large-lv60") ds = load_dataset("patrickvonplaten/librispeech_asr_dummy", "clean", split="validation") input_audio = [x["array"] for x in ds[:4]["audio"]] inputs = processor(input_audio, return_tensors="pt", padding=True) input_values = inputs.input_values attention_mask = inputs.attention_mask # input_values = inputs.input_values[:, :-1] # attention_mask = inputs.attention_mask[:, :-1] hf_wav2vec.eval() model.eval() if is_finetuned: their_output = model(source=input_values, padding_mask=(1 - attention_mask), mask=False, features_only=True)[ "encoder_out" ].transpose(0, 1) our_output = hf_wav2vec(input_values, attention_mask=attention_mask)["logits"] pred_ids = torch.argmax(our_output, dim=-1) output_string = processor.batch_decode(pred_ids) print(f"Expected 
Output: {ds[:4]['text']}, Pred: {output_string}") else: their_output = model(source=input_values, padding_mask=(1 - attention_mask), mask=False, features_only=True)[ "layer_results" ][-1][0].transpose(0, 1) our_output = hf_wav2vec(input_values, attention_mask=attention_mask)["last_hidden_state"] print(our_output.shape, their_output.shape) max_absolute_diff = torch.max(torch.abs(our_output - their_output)).item() print(f"max_absolute_diff = {max_absolute_diff}") # ~ 1e-7 success = torch.allclose(our_output, their_output, atol=1e-3) print("Do both models output the same tensors?", "🔥" if success else "💩") if not success: raise Exception("Something went wRoNg") hf_wav2vec.save_pretrained(pytorch_dump_folder_path) if is_finetuned: processor.save_pretrained(pytorch_dump_folder_path) else: processor.feature_extractor.save_pretrained(pytorch_dump_folder_path) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--pytorch_dump_folder_path", default=None, type=str, help="Path to the output PyTorch model.") parser.add_argument("--checkpoint_path", default=None, type=str, help="Path to fairseq checkpoint") parser.add_argument("--dict_path", default=None, type=str, help="Path to dict of fine-tuned model") parser.add_argument("--config_path", default=None, type=str, help="Path to hf config.json of model to convert") parser.add_argument( "--not_finetuned", action="store_true", help="Whether the model to convert is a fine-tuned model or not" ) args = parser.parse_args() convert_wav2vec2_checkpoint( args.checkpoint_path, args.pytorch_dump_folder_path, args.config_path, args.dict_path, not args.not_finetuned )
27182812/ChatGLM-LLaMA-chinese-insturct
9,432
src/transformers/models/data2vec/configuration_data2vec_vision.py
# coding=utf-8 # Copyright Meta Platforms and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Data2VecVision model configuration""" from collections import OrderedDict from typing import Mapping from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging logger = logging.get_logger(__name__) DATA2VEC_VISION_PRETRAINED_CONFIG_ARCHIVE_MAP = { "facebook/data2vec-vision-base-ft": ( "https://huggingface.co/facebook/data2vec-vision-base-ft/resolve/main/config.json" ), } class Data2VecVisionConfig(PretrainedConfig): r""" This is the configuration class to store the configuration of a [`Data2VecVisionModel`]. It is used to instantiate an Data2VecVision model according to the specified arguments, defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of the Data2VecVision [facebook/data2vec-vision-base](https://huggingface.co/facebook/data2vec-vision-base) architecture. Args: hidden_size (`int`, *optional*, defaults to 768): Dimensionality of the encoder layers and the pooler layer. num_hidden_layers (`int`, *optional*, defaults to 12): Number of hidden layers in the Transformer encoder. num_attention_heads (`int`, *optional*, defaults to 12): Number of attention heads for each attention layer in the Transformer encoder. 
intermediate_size (`int`, *optional*, defaults to 3072): Dimensionality of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder. hidden_act (`str` or `function`, *optional*, defaults to `"gelu"`): The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`, `"relu"`, `"selu"` and `"gelu_new"` are supported. hidden_dropout_prob (`float`, *optional*, defaults to 0.0): The dropout probability for all fully connected layers in the embeddings, encoder, and pooler. attention_probs_dropout_prob (`float`, *optional*, defaults to 0.0): The dropout ratio for the attention probabilities. initializer_range (`float`, *optional*, defaults to 0.02): The standard deviation of the truncated_normal_initializer for initializing all weight matrices. layer_norm_eps (`float`, *optional*, defaults to 1e-12): The epsilon used by the layer normalization layers. image_size (`int`, *optional*, defaults to 224): The size (resolution) of each image. patch_size (`int`, *optional*, defaults to 16): The size (resolution) of each patch. num_channels (`int`, *optional*, defaults to 3): The number of input channels. use_mask_token (`bool`, *optional*, defaults to `False`): Whether to use a mask token for masked image modeling. use_absolute_position_embeddings (`bool`, *optional*, defaults to `False`): Whether to use BERT-style absolute position embeddings. use_relative_position_bias (`bool`, *optional*, defaults to `False`): Whether to use T5-style relative position embeddings in the self-attention layers. use_shared_relative_position_bias (`bool`, *optional*, defaults to `False`): Whether to use the same relative position embeddings across all self-attention layers of the Transformer. layer_scale_init_value (`float`, *optional*, defaults to 0.1): Scale to use in the self-attention layers. 0.1 for base, 1e-5 for large. Set 0 to disable layer scale. 
drop_path_rate (`float`, *optional*, defaults to 0.1): Stochastic depth rate per sample (when applied in the main path of residual layers). use_mean_pooling (`bool`, *optional*, defaults to `True`): Whether to mean pool the final hidden states of the patches instead of using the final hidden state of the CLS token, before applying the classification head. out_indices (`List[int]`, *optional*, defaults to `[3, 5, 7, 11]`): Indices of the feature maps to use for semantic segmentation. pool_scales (`Tuple[int]`, *optional*, defaults to `[1, 2, 3, 6]`): Pooling scales used in Pooling Pyramid Module applied on the last feature map. use_auxiliary_head (`bool`, *optional*, defaults to `True`): Whether to use an auxiliary head during training. auxiliary_loss_weight (`float`, *optional*, defaults to 0.4): Weight of the cross-entropy loss of the auxiliary head. auxiliary_channels (`int`, *optional*, defaults to 256): Number of channels to use in the auxiliary head. auxiliary_num_convs (`int`, *optional*, defaults to 1): Number of convolutional layers to use in the auxiliary head. auxiliary_concat_input (`bool`, *optional*, defaults to `False`): Whether to concatenate the output of the auxiliary head with the input before the classification layer. semantic_loss_ignore_index (`int`, *optional*, defaults to 255): The index that is ignored by the loss function of the semantic segmentation model. 
Example: ```python >>> from transformers import Data2VecVisionConfig, Data2VecVisionModel >>> # Initializing a Data2VecVision data2vec_vision-base-patch16-224-in22k style configuration >>> configuration = Data2VecVisionConfig() >>> # Initializing a model (with random weights) from the data2vec_vision-base-patch16-224-in22k style configuration >>> model = Data2VecVisionModel(configuration) >>> # Accessing the model configuration >>> configuration = model.config ```""" model_type = "data2vec-vision" def __init__( self, hidden_size=768, num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072, hidden_act="gelu", hidden_dropout_prob=0.0, attention_probs_dropout_prob=0.0, initializer_range=0.02, layer_norm_eps=1e-12, image_size=224, patch_size=16, num_channels=3, use_mask_token=False, use_absolute_position_embeddings=False, use_relative_position_bias=False, use_shared_relative_position_bias=False, layer_scale_init_value=0.1, drop_path_rate=0.1, use_mean_pooling=True, out_indices=[3, 5, 7, 11], pool_scales=[1, 2, 3, 6], use_auxiliary_head=True, auxiliary_loss_weight=0.4, auxiliary_channels=256, auxiliary_num_convs=1, auxiliary_concat_input=False, semantic_loss_ignore_index=255, **kwargs, ): super().__init__(**kwargs) self.hidden_size = hidden_size self.num_hidden_layers = num_hidden_layers self.num_attention_heads = num_attention_heads self.intermediate_size = intermediate_size self.hidden_act = hidden_act self.hidden_dropout_prob = hidden_dropout_prob self.attention_probs_dropout_prob = attention_probs_dropout_prob self.initializer_range = initializer_range self.layer_norm_eps = layer_norm_eps self.image_size = image_size self.patch_size = patch_size self.num_channels = num_channels self.use_mask_token = use_mask_token self.use_absolute_position_embeddings = use_absolute_position_embeddings self.use_relative_position_bias = use_relative_position_bias self.use_shared_relative_position_bias = use_shared_relative_position_bias self.layer_scale_init_value = 
layer_scale_init_value self.drop_path_rate = drop_path_rate self.use_mean_pooling = use_mean_pooling # decode head attributes (semantic segmentation) self.out_indices = out_indices self.pool_scales = pool_scales # auxiliary head attributes (semantic segmentation) self.use_auxiliary_head = use_auxiliary_head self.auxiliary_loss_weight = auxiliary_loss_weight self.auxiliary_channels = auxiliary_channels self.auxiliary_num_convs = auxiliary_num_convs self.auxiliary_concat_input = auxiliary_concat_input self.semantic_loss_ignore_index = semantic_loss_ignore_index # Copied from transformers.models.vit.configuration_vit.ViTOnnxConfig class Data2VecVisionOnnxConfig(OnnxConfig): torch_onnx_minimum_version = version.parse("1.11") @property def inputs(self) -> Mapping[str, Mapping[int, str]]: return OrderedDict( [ ("pixel_values", {0: "batch", 1: "num_channels", 2: "height", 3: "width"}), ] ) @property def atol_for_validation(self) -> float: return 1e-4
27182812/ChatGLM-LLaMA-chinese-insturct
62,581
src/transformers/models/data2vec/modeling_tf_data2vec_vision.py
# coding=utf-8 # Copyright 2022 Meta Platforms and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ TF 2.0 Data2Vec Vision model.""" import collections.abc import math from dataclasses import dataclass from typing import Dict, List, Optional, Tuple, Union import numpy as np import tensorflow as tf from ...activations_tf import get_tf_activation from ...modeling_tf_outputs import ( TFBaseModelOutput, TFBaseModelOutputWithPooling, TFSemanticSegmenterOutput, TFSequenceClassifierOutput, ) from ...modeling_tf_utils import ( TFModelInputType, TFPreTrainedModel, TFSequenceClassificationLoss, get_initializer, keras_serializable, unpack_inputs, ) from ...tf_utils import shape_list, stable_softmax from ...utils import ( add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_data2vec_vision import Data2VecVisionConfig logger = logging.get_logger(__name__) # General docstring _CONFIG_FOR_DOC = "Data2VecVisionConfig" # Base docstring _CHECKPOINT_FOR_DOC = "facebook/data2vec-vision-base" _EXPECTED_OUTPUT_SHAPE = [1, 197, 768] # Image classification docstring _IMAGE_CLASS_CHECKPOINT = "facebook/data2vec-vision-base-ft1k" _IMAGE_CLASS_EXPECTED_OUTPUT = "remote control, remote" TF_DATA2VEC_VISION_PRETRAINED_MODEL_ARCHIVE_LIST = [ "facebook/data2vec-vision-base-ft1k", # See all Data2VecVision models at https://huggingface.co/models?filter=data2vec-vision ] 
@dataclass class TFData2VecVisionModelOutputWithPooling(TFBaseModelOutputWithPooling): """ Class for outputs of [`TFData2VecVisionModel`]. Args: last_hidden_state (`tf.Tensor` of shape `(batch_size, sequence_length, hidden_size)`): Sequence of hidden-states at the output of the last layer of the model. pooler_output (`tf.Tensor` of shape `(batch_size, hidden_size)`): Average of the last layer hidden states of the patch tokens (excluding the *[CLS]* token) if *config.use_mean_pooling* is set to True. If set to False, then the final hidden state of the *[CLS]* token will be returned. hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ last_hidden_state: tf.Tensor = None pooler_output: tf.Tensor = None hidden_states: Optional[Tuple[tf.Tensor]] = None attentions: Optional[Tuple[tf.Tensor]] = None class TFData2VecVisionDropPath(tf.keras.layers.Layer): """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). 
References: (1) github.com:rwightman/pytorch-image-models """ def __init__(self, drop_path, **kwargs): super().__init__(**kwargs) self.drop_path = drop_path def call(self, x, training=None): if training: keep_prob = 1 - self.drop_path shape = (tf.shape(x)[0],) + (1,) * (len(tf.shape(x)) - 1) random_tensor = keep_prob + tf.random.uniform(shape, 0, 1) random_tensor = tf.floor(random_tensor) return (x / keep_prob) * random_tensor return x class TFData2VecVisionEmbeddings(tf.keras.layers.Layer): """ Construct the CLS token, position and patch embeddings. Optionally, also the mask token. """ def __init__(self, config: Data2VecVisionConfig, **kwargs): super().__init__(**kwargs) self.config = config self.patch_embeddings = TFData2VecVisionPatchEmbeddings(config, name="patch_embeddings") self.num_patches = self.patch_embeddings.num_patches self.config = config self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob) def build(self, input_shape: tf.TensorShape): self.cls_token = self.add_weight( shape=(1, 1, self.config.hidden_size), initializer=tf.random_normal_initializer(stddev=self.config.initializer_range), trainable=True, name="cls_token", ) if self.config.use_mask_token: self.mask_token = self.add_weight( shape=(1, 1, self.config.hidden_size), initializer=tf.random_normal_initializer(stddev=self.config.initializer_range), trainable=True, name="mask_token", ) else: self.mask_token = None if self.config.use_absolute_position_embeddings: self.position_embeddings = self.add_weight( shape=(1, self.num_patches + 1, self.config.hidden_size), initializer=tf.random_normal_initializer(stddev=self.config.initializer_range), trainable=True, name="position_embeddings", ) else: self.position_embeddings = None super().build(input_shape) def call(self, pixel_values: tf.Tensor, bool_masked_pos: Optional[tf.Tensor] = None) -> tf.Tensor: embeddings = self.patch_embeddings(pixel_values) batch_size, seq_len, projection_dim = shape_list(embeddings) cls_tokens = 
tf.tile(self.cls_token, (batch_size, 1, 1)) if bool_masked_pos is not None: mask_tokens = tf.broadcast_to(self.mask_token, (batch_size, seq_len, projection_dim)) # replace the masked visual tokens by mask_tokens w = bool_masked_pos[..., None] w = tf.cast(w, mask_tokens.dtype) # since TF doesn't support eager tensor assignment embeddings = embeddings * (1 - w) + mask_tokens * w embeddings = tf.concat([cls_tokens, embeddings], axis=1) if self.position_embeddings is not None: embeddings = embeddings + self.position_embeddings embeddings = self.dropout(embeddings) return embeddings class TFData2VecVisionPatchEmbeddings(tf.keras.layers.Layer): """ Image to Patch Embedding. """ def __init__(self, config: Data2VecVisionConfig, **kwargs): super().__init__(**kwargs) self.config = config image_size, patch_size = config.image_size, config.patch_size num_channels, hidden_size = config.num_channels, config.hidden_size image_size = image_size if isinstance(image_size, collections.abc.Iterable) else (image_size, image_size) patch_size = patch_size if isinstance(patch_size, collections.abc.Iterable) else (patch_size, patch_size) num_patches = (image_size[1] // patch_size[1]) * (image_size[0] // patch_size[0]) patch_shape = (image_size[0] // patch_size[0], image_size[1] // patch_size[1]) self.image_size = image_size self.patch_size = patch_size self.num_patches = num_patches self.patch_shape = patch_shape self.num_channels = num_channels self.projection = tf.keras.layers.Conv2D( filters=hidden_size, kernel_size=patch_size, strides=patch_size, padding="valid", data_format="channels_last", kernel_initializer="glorot_uniform", # following torch.nn.Linear bias_initializer="zeros", name="projection", ) def call(self, pixel_values: tf.Tensor, training: bool = False) -> tf.Tensor: batch_size, num_channels, height, width = shape_list(pixel_values) if tf.executing_eagerly(): if num_channels != self.num_channels: raise ValueError( "Make sure that the channel dimension of the pixel values 
match with the one set in the" " configuration." ) if height != self.image_size[0] or width != self.image_size[1]: raise ValueError( f"Input image size ({height}*{width}) doesn't match model" f" ({self.image_size[0]}*{self.image_size[1]})." ) # When running on CPU, `tf.keras.layers.Conv2D` doesn't support `NCHW` format. # So change the input format from `NCHW` to `NHWC`. # shape = (batch_size, in_height, in_width, in_channels=num_channels) pixel_values = tf.transpose(pixel_values, perm=(0, 2, 3, 1)) projection = self.projection(pixel_values) # Change the 2D spatial dimensions to a single temporal dimension. # shape = (batch_size, num_patches, out_channels=embed_dim) num_patches = (width // self.patch_size[1]) * (height // self.patch_size[0]) return tf.reshape(tensor=projection, shape=(batch_size, num_patches, -1)) class TFData2VecVisionSelfAttention(tf.keras.layers.Layer): def __init__(self, config: Data2VecVisionConfig, window_size: Optional[tuple] = None, **kwargs): super().__init__(**kwargs) if config.hidden_size % config.num_attention_heads != 0: raise ValueError( f"The hidden size ({config.hidden_size}) is not a multiple of the number " f"of attention heads ({config.num_attention_heads})" ) self.num_attention_heads = config.num_attention_heads self.attention_head_size = int(config.hidden_size / config.num_attention_heads) self.all_head_size = self.num_attention_heads * self.attention_head_size self.sqrt_att_head_size = math.sqrt(self.attention_head_size) self.query = tf.keras.layers.Dense( units=self.all_head_size, kernel_initializer=get_initializer(config.initializer_range), name="query" ) self.key = tf.keras.layers.Dense( units=self.all_head_size, kernel_initializer=get_initializer(config.initializer_range), name="key", use_bias=False, ) self.value = tf.keras.layers.Dense( units=self.all_head_size, kernel_initializer=get_initializer(config.initializer_range), name="value" ) self.dropout = tf.keras.layers.Dropout(rate=config.attention_probs_dropout_prob) if 
window_size: self.relative_position_bias = TFData2VecVisionRelativePositionBias( config, window_size=window_size, name="relative_position_bias" ) else: self.relative_position_bias = None def transpose_for_scores(self, tensor: tf.Tensor, batch_size: int) -> tf.Tensor: # Reshape from [batch_size, seq_length, all_head_size] to [batch_size, seq_length, num_attention_heads, attention_head_size] tensor = tf.reshape(tensor=tensor, shape=(batch_size, -1, self.num_attention_heads, self.attention_head_size)) # Transpose the tensor from [batch_size, seq_length, num_attention_heads, attention_head_size] to [batch_size, num_attention_heads, seq_length, attention_head_size] return tf.transpose(tensor, perm=[0, 2, 1, 3]) def call( self, hidden_states: tf.Tensor, head_mask: tf.Tensor, output_attentions: bool, relative_position_bias: Optional["TFData2VecVisionRelativePositionBias"] = None, training: bool = False, ) -> Tuple[tf.Tensor]: batch_size = shape_list(hidden_states)[0] mixed_query_layer = self.query(inputs=hidden_states) mixed_key_layer = self.key(inputs=hidden_states) mixed_value_layer = self.value(inputs=hidden_states) query_layer = self.transpose_for_scores(mixed_query_layer, batch_size) key_layer = self.transpose_for_scores(mixed_key_layer, batch_size) value_layer = self.transpose_for_scores(mixed_value_layer, batch_size) # Take the dot product between "query" and "key" to get the raw attention scores. # (batch size, num_heads, seq_len_q, seq_len_k) attention_scores = tf.matmul(query_layer, key_layer, transpose_b=True) attention_scores = attention_scores / self.sqrt_att_head_size # Add relative position bias if present. if self.relative_position_bias is not None: # Passing `0.0` to the `relative_position_bias()` layer because otherwise Keras # might complain about `Layer.call()` not being invoked properly. In this case this input # i.e., 0.0 is not going to be used in any calculations so we're safe. 
attention_scores = attention_scores + self.relative_position_bias(0.0)[None, ...] # Add shared relative position bias if provided. if relative_position_bias is not None: attention_scores = attention_scores + relative_position_bias # Normalize the attention scores to probabilities. attention_probs = stable_softmax(logits=attention_scores, axis=-1) # This is actually dropping out entire tokens to attend to, which might # seem a bit unusual, but is taken from the original Transformer paper. attention_probs = self.dropout(inputs=attention_probs, training=training) # Mask heads if we want to if head_mask is not None: attention_probs = tf.multiply(attention_probs, head_mask) attention_output = tf.matmul(attention_probs, value_layer) attention_output = tf.transpose(attention_output, perm=[0, 2, 1, 3]) # (batch_size, seq_len_q, all_head_size) attention_output = tf.reshape(tensor=attention_output, shape=(batch_size, -1, self.all_head_size)) outputs = (attention_output, attention_probs) if output_attentions else (attention_output,) return outputs class TFData2VecVisionSelfOutput(tf.keras.layers.Layer): """ The residual connection is defined in TFData2VecVisionLayer instead of here (as is the case with other models), due to the layernorm applied before each block. 
""" def __init__(self, config: Data2VecVisionConfig, **kwargs): super().__init__(**kwargs) self.dense = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), name="dense" ) self.dropout = tf.keras.layers.Dropout(rate=config.hidden_dropout_prob) def call(self, hidden_states: tf.Tensor, input_tensor: tf.Tensor, gamma=None, training: bool = False) -> tf.Tensor: hidden_states = self.dense(inputs=hidden_states) hidden_states = self.dropout(inputs=hidden_states, training=training) return hidden_states class TFData2VecVisionAttention(tf.keras.layers.Layer): def __init__(self, config: Data2VecVisionConfig, window_size: Optional[tuple] = None, **kwargs): super().__init__(**kwargs) self.attention = TFData2VecVisionSelfAttention(config, window_size=window_size, name="attention") self.dense_output = TFData2VecVisionSelfOutput(config, name="output") def prune_heads(self, heads): raise NotImplementedError def call( self, input_tensor: tf.Tensor, head_mask: tf.Tensor, output_attentions: bool, relative_position_bias: Optional["TFData2VecVisionRelativePositionBias"] = None, training: bool = False, ) -> Tuple[tf.Tensor]: self_outputs = self.attention( hidden_states=input_tensor, head_mask=head_mask, output_attentions=output_attentions, relative_position_bias=relative_position_bias, training=training, ) attention_output = self.dense_output( hidden_states=self_outputs[0], input_tensor=input_tensor, training=training ) outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them return outputs # Copied from transformers.models.vit.modeling_tf_vit.TFViTIntermediate with ViT->Data2VecVision class TFData2VecVisionIntermediate(tf.keras.layers.Layer): def __init__(self, config: Data2VecVisionConfig, **kwargs): super().__init__(**kwargs) self.dense = tf.keras.layers.Dense( units=config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), name="dense" ) if isinstance(config.hidden_act, 
str): self.intermediate_act_fn = get_tf_activation(config.hidden_act) else: self.intermediate_act_fn = config.hidden_act def call(self, hidden_states: tf.Tensor) -> tf.Tensor: hidden_states = self.dense(inputs=hidden_states) hidden_states = self.intermediate_act_fn(hidden_states) return hidden_states class TFData2VecVisionOutput(tf.keras.layers.Layer): def __init__(self, config: Data2VecVisionConfig, **kwargs): super().__init__(**kwargs) self.dense = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), name="dense" ) self.dropout = tf.keras.layers.Dropout(rate=config.hidden_dropout_prob) def call(self, hidden_states: tf.Tensor, training: bool = False) -> tf.Tensor: hidden_states = self.dense(inputs=hidden_states) hidden_states = self.dropout(inputs=hidden_states, training=training) return hidden_states class TFData2VecVisionLayer(tf.keras.layers.Layer): """This corresponds to the Block class in the timm implementation.""" def __init__( self, config: Data2VecVisionConfig, window_size: Optional[tuple] = None, drop_path_rate: float = 0.0, **kwargs ): super().__init__(**kwargs) self.config = config self.attention = TFData2VecVisionAttention(config, window_size=window_size, name="attention") self.intermediate = TFData2VecVisionIntermediate(config, name="intermediate") self.data2vec_output = TFData2VecVisionOutput(config, name="output") self.layernorm_before = tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name="layernorm_before" ) self.layernorm_after = tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name="layernorm_after" ) # Using `layers.Activation` instead of `tf.identity` to better control `training` # behaviour. 
self.drop_path = ( TFData2VecVisionDropPath(drop_path_rate, name="drop_path") if drop_path_rate > 0.0 else tf.keras.layers.Activation("linear", name="drop_path") ) self.init_values = config.layer_scale_init_value def build(self, input_shape: tf.TensorShape): if self.init_values > 0: self.lambda_1 = self.add_weight( shape=(self.config.hidden_size), initializer="ones", trainable=True, name="lambda_1", ) self.lambda_2 = self.add_weight( shape=(self.config.hidden_size), initializer="ones", trainable=True, name="lambda_2", ) self.lambda_1.assign(self.init_values * tf.ones((self.config.hidden_size))) self.lambda_2.assign(self.init_values * tf.ones((self.config.hidden_size))) else: self.lambda_1, self.lambda_2 = None, None super().build(input_shape) def call( self, hidden_states: tf.Tensor, head_mask: tf.Tensor, output_attentions: bool, relative_position_bias: Optional["TFData2VecVisionRelativePositionBias"] = None, training: bool = False, ) -> Tuple[tf.Tensor]: self_attention_outputs = self.attention( # in Data2VecVision, layernorm is applied before self-attention input_tensor=self.layernorm_before(inputs=hidden_states), head_mask=head_mask, output_attentions=output_attentions, relative_position_bias=relative_position_bias, training=training, ) attention_output = self_attention_outputs[0] outputs = self_attention_outputs[1:] # add self attentions if we output attention weights # apply lambda_1 if present if self.lambda_1 is not None: attention_output = self.lambda_1 * attention_output # first residual connection hidden_states = self.drop_path(attention_output) + hidden_states # in Data2VecVision, layernorm is also applied after self-attention layer_output = self.layernorm_after(hidden_states) layer_output = self.intermediate(layer_output) layer_output = self.data2vec_output(layer_output) if self.lambda_2 is not None: layer_output = self.lambda_2 * layer_output # second residual connection layer_output = self.drop_path(layer_output) + hidden_states outputs = 
(layer_output,) + outputs return outputs # Taken and modified from here: # https://github.com/leondgarse/keras_cv_attention_models/blob/main/keras_cv_attention_models/beit/beit.py#L28 class TFData2VecVisionRelativePositionBias(tf.keras.layers.Layer): def __init__(self, config: Data2VecVisionConfig, window_size: tuple, **kwargs) -> None: super().__init__(**kwargs) self.config = config self.window_size = window_size # +3 for cls_token_pos_len # window_size can be something like (14, 14) self.num_relative_distance = (2 * window_size[0] - 1) * (2 * window_size[1] - 1) + 3 self.relative_position_index = self.get_position_index() def build(self, input_shape): self.relative_position_bias_table = self.add_weight( shape=(self.num_relative_distance, self.config.num_attention_heads), initializer="zeros", trainable=True, name="relative_position_bias_table", ) # [2*Wh-1 * 2*Ww-1, nH] # cls to token & token 2 cls & cls to cls super().build(input_shape) def get_position_index(self): # get pair-wise relative position index for each token inside the window xx, yy = tf.meshgrid(range(self.window_size[0]), range(self.window_size[1])) coords = tf.stack([yy, xx], axis=0) # [2, Wh, Ww] coords_flatten = tf.reshape(coords, [2, -1]) # [2, Wh*Ww] relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :] # [2, Wh*Ww, Wh*Ww] relative_coords = tf.transpose(relative_coords, perm=[1, 2, 0]) # [Wh*Ww, Wh*Ww, 2] xx = (relative_coords[:, :, 0] + self.window_size[0] - 1) * (2 * self.window_size[1] - 1) yy = relative_coords[:, :, 1] + self.window_size[1] - 1 relative_coords = tf.stack([xx, yy], axis=-1) relative_position_index = tf.reduce_sum(relative_coords, axis=-1) # [Wh*Ww, Wh*Ww] top = tf.ones((1, relative_position_index.shape[1]), dtype=relative_position_index.dtype) * ( self.num_relative_distance - 3 ) left = tf.ones((relative_position_index.shape[0], 1), dtype=relative_position_index.dtype) * ( self.num_relative_distance - 2 ) corner = tf.ones((1, 1), 
dtype=relative_position_index.dtype) * (self.num_relative_distance - 1) left_corner = tf.concat([corner, left], axis=0) relative_position_index = tf.concat([top, relative_position_index], axis=0) relative_position_index = tf.concat([left_corner, relative_position_index], axis=1) # [Wh*Ww + 1, Wh*Ww + 1] return relative_position_index def call(self, inputs=None) -> tf.Tensor: relative_position_bias = tf.gather(self.relative_position_bias_table, self.relative_position_index, axis=0) return tf.transpose(relative_position_bias, [2, 0, 1]) class TFData2VecVisionEncoder(tf.keras.layers.Layer): def __init__(self, config: Data2VecVisionConfig, window_size: Optional[tuple] = None, **kwargs): super().__init__(**kwargs) self.config = config if config.use_shared_relative_position_bias: self.relative_position_bias = TFData2VecVisionRelativePositionBias( config, window_size=window_size, name="relative_position_bias" ) else: self.relative_position_bias = None # stochastic depth decay rule dpr = list(tf.linspace(0.0, config.drop_path_rate, config.num_hidden_layers)) self.layer = [ TFData2VecVisionLayer( config, window_size=window_size if config.use_relative_position_bias else None, drop_path_rate=dpr[i], name=f"layer_._{i}", ) for i in range(config.num_hidden_layers) ] def call( self, hidden_states: tf.Tensor, head_mask: Optional[tf.Tensor] = None, output_attentions: bool = False, output_hidden_states: bool = False, return_dict: bool = True, ) -> Union[tuple, TFBaseModelOutput]: all_hidden_states = () if output_hidden_states else None all_self_attentions = () if output_attentions else None for i, layer_module in enumerate(self.layer): if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) layer_head_mask = head_mask[i] if head_mask is not None else None # Passing `0.0` to the `relative_position_bias()` layer because otherwise Keras # might complain about `Layer.call()` not being invoked properly. 
In this case this input # i.e., 0.0 is not going to be used in any calculations so we're safe. relative_position_bias = ( self.relative_position_bias(0.0) if self.relative_position_bias is not None else None ) layer_outputs = layer_module(hidden_states, layer_head_mask, output_attentions, relative_position_bias) hidden_states = layer_outputs[0] if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not return_dict: return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if v is not None) return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) @keras_serializable class TFData2VecVisionMainLayer(tf.keras.layers.Layer): config_class = Data2VecVisionConfig def __init__(self, config: Data2VecVisionConfig, add_pooling_layer: bool = True, **kwargs): super().__init__(**kwargs) self.config = config self.add_pooling_layer = add_pooling_layer self.embeddings = TFData2VecVisionEmbeddings(config, name="embeddings") self.encoder = TFData2VecVisionEncoder( config, window_size=self.embeddings.patch_embeddings.patch_shape, name="encoder" ) self.layernorm = ( tf.identity if config.use_mean_pooling else tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="layernorm") ) # We are setting the `data_format` like so because from here on we will revert to the # NCHW output format self.pooler = TFData2VecVisionPooler(config, name="pooler") if add_pooling_layer else None def get_input_embeddings(self) -> tf.keras.layers.Layer: return self.embeddings.patch_embeddings def _prune_heads(self, heads_to_prune): """ Prunes heads of the model. 
heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base class PreTrainedModel """ raise NotImplementedError @unpack_inputs def call( self, pixel_values: Optional[tf.Tensor] = None, bool_masked_pos: Optional[tf.Tensor] = None, head_mask: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, training: bool = False, ) -> Union[tuple, TFData2VecVisionModelOutputWithPooling]: output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict if pixel_values is None: raise ValueError("You have to specify pixel_values") # Prepare head mask if needed # 1.0 in head_mask indicate we keep the head # attention_probs has shape bsz x n_heads x N x N # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] if head_mask is not None: raise NotImplementedError else: head_mask = [None] * self.config.num_hidden_layers embedding_output = self.embeddings(pixel_values, bool_masked_pos, training=training) encoder_outputs = self.encoder( embedding_output, head_mask=head_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) sequence_output = encoder_outputs[0] sequence_output = self.layernorm(sequence_output) pooled_output = self.pooler(sequence_output) if self.pooler is not None else None if not return_dict: head_outputs = (sequence_output, pooled_output) if pooled_output is not None else (sequence_output,) return head_outputs + encoder_outputs[1:] return TFData2VecVisionModelOutputWithPooling( last_hidden_state=sequence_output, 
pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) class TFData2VecVisionPooler(tf.keras.layers.Layer): def __init__(self, config: Data2VecVisionConfig, **kwargs): super().__init__(**kwargs) self.layernorm = ( tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="layernorm") if config.use_mean_pooling else None ) def call(self, hidden_states: tf.Tensor) -> tf.Tensor: if self.layernorm is not None: # Mean pool the final hidden states of the patch tokens patch_tokens = hidden_states[:, 1:, :] pooled_output = self.layernorm(tf.reduce_mean(patch_tokens, axis=1)) else: # Pool by simply taking the final hidden state of the [CLS] token pooled_output = hidden_states[:, 0] return pooled_output class TFData2VecVisionPreTrainedModel(TFPreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. """ config_class = Data2VecVisionConfig base_model_prefix = "data2vec_vision" main_input_name = "pixel_values" _keys_to_ignore_on_load_unexpected = [r"relative_position_index"] @property def dummy_inputs(self) -> Dict[str, tf.Tensor]: """ Dummy inputs to build the network. Returns: `Dict[str, tf.Tensor]`: The dummy inputs. """ VISION_DUMMY_INPUTS = tf.random.uniform( shape=(3, self.config.num_channels, self.config.image_size, self.config.image_size), dtype=tf.float32, ) return {"pixel_values": tf.constant(VISION_DUMMY_INPUTS)} @tf.function( input_signature=[ { "pixel_values": tf.TensorSpec((None, None, None, None), tf.float32, name="pixel_values"), } ] ) def serving(self, inputs): """ Method used for serving the model. Args: inputs (`Dict[str, tf.Tensor]`): The input of the saved model as a dictionary of tensors. """ output = self.call(inputs) return self.serving_output(output) DATA2VEC_VISION_START_DOCSTRING = r""" This model inherits from [`TFPreTrainedModel`]. 
Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.). This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. Use it as a regular TF 2.0 Keras Model and refer to the TF 2.0 documentation for all matter related to general usage and behavior. <Tip> TensorFlow models and layers in `transformers` accept two formats as input: - having all inputs as keyword arguments (like PyTorch models), or - having all inputs as a list, tuple or dict in the first positional argument. The reason the second format is supported is that Keras methods prefer this format when passing inputs to models and layers. Because of this support, when using methods like `model.fit()` things should "just work" for you - just pass your inputs and labels in any format that `model.fit()` supports! If, however, you want to use the second format outside of Keras methods like `fit()` and `predict()`, such as when creating your own layers or models with the Keras `Functional` API, there are three possibilities you can use to gather all the input Tensors in the first positional argument: - a single Tensor with `pixel_values` only and nothing else: `model(pixel_values)` - a list of varying length with one or several input Tensors IN THE ORDER given in the docstring: `model([pixel_values, attention_mask])` or `model([pixel_values, attention_mask, token_type_ids])` - a dictionary with one or several input Tensors associated to the input names given in the docstring: `model({"pixel_values": pixel_values, "token_type_ids": token_type_ids})` Note that when creating models and layers with [subclassing](https://keras.io/guides/making_new_layers_and_models_via_subclassing/) then you don't need to worry about any of this, as you can just pass inputs like you would to any other Python function! 
</Tip> Args: config ([`Data2VecVisionConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~TFPreTrainedModel.from_pretrained`] method to load the model weights. """ DATA2VEC_VISION_INPUTS_DOCSTRING = r""" Args: pixel_values (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]` `Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]` and each example must have the shape `(batch_size, num_channels, height, width)`): Pixel values. Pixel values can be obtained using [`AutoImageProcessor`]. See [`BeitImageProcessor.__call__`] for details. head_mask (`np.ndarray` or `tf.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple. This argument can be used in eager mode, in graph mode the value will always be set to True. training (`bool`, *optional*, defaults to `False``): Whether or not to use the model in training mode (some modules like dropout modules have different behaviors between training and evaluation). 
""" @add_start_docstrings( "The bare Data2VecVision Model transformer outputting raw hidden-states without any specific head on top.", DATA2VEC_VISION_START_DOCSTRING, ) class TFData2VecVisionModel(TFData2VecVisionPreTrainedModel): def __init__(self, config: Data2VecVisionConfig, add_pooling_layer: bool = False, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.config = config self.data2vec_vision = TFData2VecVisionMainLayer( config, add_pooling_layer=add_pooling_layer, name="data2vec_vision" ) def get_input_embeddings(self): return self.data2vec_vision.get_input_embeddings() @unpack_inputs @add_start_docstrings_to_model_forward(DATA2VEC_VISION_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TFData2VecVisionModelOutputWithPooling, config_class=_CONFIG_FOR_DOC, modality="vision", expected_output=_EXPECTED_OUTPUT_SHAPE, ) def call( self, pixel_values: Optional[TFModelInputType] = None, bool_masked_pos: Optional[tf.Tensor] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, training: bool = False, ) -> Union[tuple, TFData2VecVisionModelOutputWithPooling]: outputs = self.data2vec_vision( pixel_values=pixel_values, bool_masked_pos=bool_masked_pos, head_mask=head_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) return outputs def serving_output(self, output: TFData2VecVisionModelOutputWithPooling) -> TFData2VecVisionModelOutputWithPooling: hidden_states = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attentions = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFData2VecVisionModelOutputWithPooling( last_hidden_state=output.last_hidden_state, pooler_output=output.pooler_output, hidden_states=hidden_states, 
attentions=attentions, ) @add_start_docstrings( """ Data2VecVision Model transformer with an image classification head on top (a linear layer on top of the average of the final hidden states of the patch tokens) e.g. for ImageNet. """, DATA2VEC_VISION_START_DOCSTRING, ) class TFData2VecVisionForImageClassification(TFData2VecVisionPreTrainedModel, TFSequenceClassificationLoss): def __init__(self, config: Data2VecVisionConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.num_labels = config.num_labels self.data2vec_vision = TFData2VecVisionMainLayer(config, add_pooling_layer=True, name="data2vec_vision") # Classifier head self.classifier = tf.keras.layers.Dense( units=config.num_labels, kernel_initializer=get_initializer(config.initializer_range), name="classifier", ) @unpack_inputs @add_start_docstrings_to_model_forward(DATA2VEC_VISION_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_IMAGE_CLASS_CHECKPOINT, output_type=TFSequenceClassifierOutput, config_class=_CONFIG_FOR_DOC, expected_output=_IMAGE_CLASS_EXPECTED_OUTPUT, ) def call( self, pixel_values: Optional[TFModelInputType] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, labels: Optional[Union[np.ndarray, tf.Tensor]] = None, training: Optional[bool] = False, ) -> Union[TFSequenceClassifierOutput, tuple]: r""" labels (`tf.Tensor` or `np.ndarray` of shape `(batch_size,)`, *optional*): Labels for computing the image classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.data2vec_vision( pixel_values=pixel_values, head_mask=head_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) pooled_output = outputs.pooler_output if return_dict else outputs[1] logits = self.classifier(pooled_output) loss = None if labels is None else self.hf_compute_loss(labels=labels, logits=logits) if not return_dict: output = (logits,) + outputs[2:] return ((loss,) + output) if loss is not None else output return TFSequenceClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def serving_output(self, output: TFSequenceClassifierOutput) -> TFSequenceClassifierOutput: hidden_states = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attentions = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFSequenceClassifierOutput(logits=output.logits, hidden_states=hidden_states, attentions=attentions) class TFData2VecVisionConvModule(tf.keras.layers.Layer): """ A convolutional block that bundles conv/norm/activation layers. This block simplifies the usage of convolution layers, which are commonly used with a norm layer (e.g., BatchNorm) and activation layer (e.g., ReLU). Based on OpenMMLab's implementation, found in https://github.com/open-mmlab/mmsegmentation. 
""" def __init__( self, out_channels: int, kernel_size: Union[int, Tuple[int, int]], padding: str = "valid", bias: bool = False, dilation: Union[int, Tuple[int, int]] = 1, **kwargs, ) -> None: super().__init__(**kwargs) self.conv = tf.keras.layers.Conv2D( filters=out_channels, kernel_size=kernel_size, padding=padding, use_bias=bias, dilation_rate=dilation, name="conv", ) self.bn = tf.keras.layers.BatchNormalization(name="bn", momentum=0.9, epsilon=1e-5) self.activation = tf.nn.relu def call(self, input: tf.Tensor) -> tf.Tensor: output = self.conv(input) output = self.bn(output) output = self.activation(output) return output # Copied from: # https://gist.github.com/Rocketknight1/43abbe6e73f1008e6e459486e01e0ceb class TFAdaptiveAvgPool1D(tf.keras.layers.Layer): def __init__(self, output_dim, mode="dense", **kwargs): super().__init__(**kwargs) self.output_dim = output_dim self.mode = mode self.map = None def build(self, input_shape): super().build(input_shape) """We pre-compute the sparse matrix for the build() step once. 
The below code comes from https://stackoverflow.com/questions/53841509/how-does-adaptive-pooling-in-pytorch-work/63603993#63603993.""" def get_kernels(ind, outd) -> List: """Returns a List [(kernel_offset_start,kernel_length)] defining all the pooling kernels for a 1-D adaptive pooling layer that takes an input of dimension `ind` and yields an output of dimension `outd`""" def start_index(a, b, c): return math.floor((float(a) * float(c)) / b) def end_index(a, b, c): return math.ceil((float(a + 1) * float(c)) / b) results = [] for ow in range(outd): start = start_index(ow, outd, ind) end = end_index(ow, outd, ind) sz = end - start results.append((start, sz)) return results in_dim = int(input_shape[-1]) kernels = get_kernels(in_dim, self.output_dim) sparse_map = np.zeros((in_dim, self.output_dim), dtype=np.float32) for i, kernel in enumerate(kernels): sparse_map[kernel[0] : kernel[0] + kernel[1], i] = 1 / kernel[1] if self.mode == "dense": self.map = tf.constant(sparse_map) else: self.map = tf.sparse.from_dense(sparse_map) def call(self, inputs): if self.mode == "dense": return inputs @ self.map else: input_dims = inputs.shape input_matrix = tf.reshape(inputs, (-1, input_dims[-1])) out = tf.sparse.sparse_dense_matmul(input_matrix, self.map) return tf.reshape(out, input_dims[:-1].as_list() + [-1]) def get_config(self): config = super().get_config() config.update({"output_dim": self.output_dim, "mode": self.mode}) return config class TFAdaptiveAvgPool2D(tf.keras.layers.Layer): def __init__(self, output_shape, mode="dense", **kwargs): super().__init__(**kwargs) self.mode = mode self.h_pool = TFAdaptiveAvgPool1D(output_shape[0], mode=mode, name="h_pool") self.w_pool = TFAdaptiveAvgPool1D(output_shape[1], mode=mode, name="w_pool") def call(self, inputs): # Rearrange from NHWC -> NCHW inputs = tf.transpose(inputs, perm=[0, 3, 1, 2]) # Perform W-pooling inputs = self.w_pool(inputs) # Rearrange NCHW -> NCWH inputs = tf.transpose(inputs, perm=[0, 1, 3, 2]) # Perform H-pooling 
        inputs = self.h_pool(inputs)
        # Rearrange from NCWH -> NHWC
        inputs = tf.transpose(inputs, perm=[0, 3, 2, 1])

        return inputs

    def get_config(self):
        config = super().get_config()
        config.update({"mode": self.mode})
        return config


class TFData2VecVisionPyramidPoolingModule(tf.keras.layers.Layer):
    """
    Pyramid Pooling Module (PPM) used in PSPNet.

    Args:
        pool_scales (tuple[int]): Pooling scales used in Pooling Pyramid Module.
        channels (int): Channels after modules, before conv_seg.

    Based on OpenMMLab's implementation, found in https://github.com/open-mmlab/mmsegmentation.
    """

    def __init__(self, pool_scales: Tuple[int, ...], channels: int, **kwargs) -> None:
        super().__init__(**kwargs)
        self.pool_scales = pool_scales
        self.channels = channels

        # One [adaptive-pool, 1x1 conv] pair per pooling scale. The conv names ("{idx}.1")
        # must stay as-is: they map onto checkpoint weight names.
        self.layer_list = []
        for idx, pool_scale in enumerate(pool_scales):
            pool_scale = pool_scale if isinstance(pool_scale, collections.abc.Iterable) else (pool_scale, pool_scale)
            self.layer_list.append(
                [
                    TFAdaptiveAvgPool2D(output_shape=pool_scale),
                    TFData2VecVisionConvModule(out_channels=self.channels, kernel_size=1, name=f"{idx}.1"),
                ]
            )

    def call(self, x: tf.Tensor) -> List[tf.Tensor]:
        ppm_outs = []
        inputs = x

        for ppm in self.layer_list:
            # Apply the pool and the conv sequentially; `x` is re-bound to the last output.
            for layer_module in ppm:
                ppm_out = layer_module(x)
                x = ppm_out

            # Upsample each scale back to the spatial size of the original input.
            upsampled_ppm_out = tf.image.resize(ppm_out, size=shape_list(inputs)[1:-1], method="bilinear")
            ppm_outs.append(upsampled_ppm_out)
        return ppm_outs


class TFData2VecVisionUperHead(tf.keras.layers.Layer):
    """
    Unified Perceptual Parsing for Scene Understanding. This head is the implementation of
    [UPerNet](https://arxiv.org/abs/1807.10221).

    Based on OpenMMLab's implementation, found in https://github.com/open-mmlab/mmsegmentation.
    """

    def __init__(self, config: Data2VecVisionConfig, **kwargs) -> None:
        super().__init__(**kwargs)

        self.pool_scales = config.pool_scales  # e.g. (1, 2, 3, 6)
        self.in_channels = [config.hidden_size] * 4  # e.g. [768, 768, 768, 768]
        self.channels = config.hidden_size
        self.classifier = tf.keras.layers.Conv2D(config.num_labels, kernel_size=1, name="classifier")

        # PSP Module
        self.psp_modules = TFData2VecVisionPyramidPoolingModule(self.pool_scales, self.channels, name="psp_modules")
        self.bottleneck = TFData2VecVisionConvModule(self.channels, kernel_size=3, padding="same", name="bottleneck")
        # FPN Module
        self.lateral_convs = []
        self.fpn_convs = []
        for idx, _ in enumerate(self.in_channels[:-1]):  # skip the top layer
            l_conv = TFData2VecVisionConvModule(out_channels=self.channels, kernel_size=1, name=f"lateral_convs.{idx}")
            fpn_conv = TFData2VecVisionConvModule(
                out_channels=self.channels, kernel_size=3, padding="same", name=f"fpn_convs.{idx}"
            )
            self.lateral_convs.append(l_conv)
            self.fpn_convs.append(fpn_conv)

        self.fpn_bottleneck = TFData2VecVisionConvModule(
            out_channels=self.channels, kernel_size=3, padding="same", name="fpn_bottleneck"
        )

    def psp_forward(self, inputs):
        # PSP operates on the top-most (last) feature map and fuses all pooled scales.
        x = inputs[-1]
        psp_outs = [x]
        psp_outs.extend(self.psp_modules(x))
        psp_outs = tf.concat(psp_outs, axis=-1)
        output = self.bottleneck(psp_outs)

        return output

    def call(self, encoder_hidden_states: tf.Tensor) -> tf.Tensor:
        # build laterals
        laterals = [lateral_conv(encoder_hidden_states[i]) for i, lateral_conv in enumerate(self.lateral_convs)]

        laterals.append(self.psp_forward(encoder_hidden_states))

        # build top-down path
        used_backbone_levels = len(laterals)
        for i in range(used_backbone_levels - 1, 0, -1):
            prev_shape = shape_list(laterals[i - 1])[1:-1]
            laterals[i - 1] = laterals[i - 1] + tf.image.resize(laterals[i], size=prev_shape, method="bilinear")

        # build outputs
        fpn_outs = [self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels - 1)]
        # append psp feature
        fpn_outs.append(laterals[-1])

        # Upsample everything to the resolution of the bottom (largest) level before fusing.
        for i in range(used_backbone_levels - 1, 0, -1):
            fpn_outs[i] = tf.image.resize(fpn_outs[i], size=shape_list(fpn_outs[0])[1:-1], method="bilinear")
        fpn_outs = tf.concat(fpn_outs, axis=-1)
        output = self.fpn_bottleneck(fpn_outs)
        output = self.classifier(output)

        return output


class TFData2VecVisionFCNHead(tf.keras.layers.Layer):
    """
    Fully Convolution Networks for Semantic Segmentation. This head is implemented from
    [FCNNet](https://arxiv.org/abs/1411.4038).

    Args:
        config (Data2VecVisionConfig): Configuration.
        kernel_size (int): The kernel size for convs in the head. Default: 3.
        dilation (int): The dilation rate for convs in the head. Default: 1.

    Based on OpenMMLab's implementation, found in https://github.com/open-mmlab/mmsegmentation.
    """

    def __init__(
        self,
        config: Data2VecVisionConfig,
        in_index: int = 2,
        kernel_size: int = 3,
        dilation: Union[int, Tuple[int, int]] = 1,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.in_channels = config.hidden_size
        self.channels = config.auxiliary_channels
        self.num_convs = config.auxiliary_num_convs
        self.concat_input = config.auxiliary_concat_input
        self.in_index = in_index

        # NOTE(review): the first conv is named "convs.0" while later ones are
        # "conv_module_{i+2}" — this looks inconsistent, but the names map onto checkpoint
        # weights; confirm against saved checkpoints before renaming.
        convs = []
        convs.append(
            TFData2VecVisionConvModule(
                out_channels=self.channels,
                kernel_size=kernel_size,
                padding="same",
                dilation=dilation,
                name="convs.0",
            )
        )
        for i in range(self.num_convs - 1):
            convs.append(
                TFData2VecVisionConvModule(
                    out_channels=self.channels,
                    kernel_size=kernel_size,
                    padding="same",
                    dilation=dilation,
                    name=f"conv_module_{i+2}",
                )
            )
        if self.num_convs == 0:
            self.convs = [tf.identity]
        else:
            self.convs = convs
        if self.concat_input:
            self.conv_cat = TFData2VecVisionConvModule(
                out_channels=self.channels, kernel_size=kernel_size, padding="same", name="conv_cat"
            )

        self.classifier = tf.keras.layers.Conv2D(config.num_labels, kernel_size=1, name="classifier")

    def call(self, encoder_hidden_states: tf.Tensor) -> tf.Tensor:
        # just take the relevant feature maps
        hidden_states = encoder_hidden_states[self.in_index]
        output = hidden_states
        for layer_module in self.convs:
            output = layer_module(output)
        if self.concat_input:
            output = self.conv_cat(tf.concat([hidden_states, output], axis=-1))
        output = self.classifier(output)
        return output
@add_start_docstrings( """ Data2VecVision Model transformer with a semantic segmentation head on top e.g. for ADE20k, CityScapes. """, DATA2VEC_VISION_START_DOCSTRING, ) class TFData2VecVisionForSemanticSegmentation(TFData2VecVisionPreTrainedModel): def __init__(self, config: Data2VecVisionConfig, *inputs, **kwargs) -> None: super().__init__(config, *inputs, **kwargs) self.num_labels = config.num_labels self.data2vec_vision = TFData2VecVisionMainLayer(config, add_pooling_layer=False, name="data2vec_vision") # FPNs self.fpn1 = [ tf.keras.layers.Conv2DTranspose(config.hidden_size, kernel_size=2, strides=2, name="fpn1.0"), tf.keras.layers.BatchNormalization(name="fpn1.1", momentum=0.9, epsilon=1e-5), tf.keras.layers.Activation("gelu"), tf.keras.layers.Conv2DTranspose(config.hidden_size, kernel_size=2, strides=2, name="fpn1.3"), ] self.fpn2 = [tf.keras.layers.Conv2DTranspose(config.hidden_size, kernel_size=2, strides=2, name="fpn2.0")] self.fpn3 = tf.identity self.fpn4 = tf.keras.layers.MaxPool2D(pool_size=2, strides=2) # Semantic segmentation head(s) self.decode_head = TFData2VecVisionUperHead(config, name="decode_head") self.auxiliary_head = ( TFData2VecVisionFCNHead(config, name="auxiliary_head") if config.use_auxiliary_head else None ) def compute_loss(self, logits, auxiliary_logits, labels): # upsample logits to the images' original size if len(shape_list(labels)) > 3: label_interp_shape = shape_list(labels)[1:-1] else: label_interp_shape = shape_list(labels)[-2:] upsampled_logits = tf.image.resize(logits, size=label_interp_shape, method="bilinear") if auxiliary_logits is not None: upsampled_auxiliary_logits = tf.image.resize(auxiliary_logits, size=label_interp_shape, method="bilinear") # compute weighted loss loss_fct = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True, reduction="none") # Copied from https://www.tensorflow.org/text/tutorials/transformer#loss_and_metrics. # Utility to mask the index to ignore during computing the loss. 
def masked_loss(real, pred): mask = tf.math.logical_not(tf.math.equal(real, self.config.semantic_loss_ignore_index)) loss_ = loss_fct(real, pred) mask = tf.cast(mask, dtype=loss_.dtype) loss_ *= mask reduced_masked_loss = tf.reduce_sum(loss_) / tf.reduce_sum(mask) return tf.reshape(reduced_masked_loss, (1,)) main_loss = masked_loss(labels, upsampled_logits) auxiliary_loss = masked_loss(labels, upsampled_auxiliary_logits) loss = main_loss + self.config.auxiliary_loss_weight * auxiliary_loss return loss @unpack_inputs @add_start_docstrings_to_model_forward(DATA2VEC_VISION_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFSemanticSegmenterOutput, config_class=_CONFIG_FOR_DOC) def call( self, pixel_values: Optional[tf.Tensor] = None, head_mask: Optional[tf.Tensor] = None, labels: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[tuple, TFSemanticSegmenterOutput]: r""" labels (`tf.Tensor` of shape `(batch_size, height, width)`, *optional*): Ground truth semantic segmentation maps for computing the loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels > 1`, a classification loss is computed (Cross-Entropy). 
Returns: Examples: ```python >>> from transformers import AutoImageProcessor, TFData2VecVisionForSemanticSegmentation >>> from PIL import Image >>> import requests >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg" >>> image = Image.open(requests.get(url, stream=True).raw) >>> image_processor = AutoImageProcessor.from_pretrained("facebook/data2vec-vision-base") >>> model = TFData2VecVisionForSemanticSegmentation.from_pretrained("facebook/data2vec-vision-base") >>> inputs = image_processor(images=image, return_tensors="pt") >>> outputs = model(**inputs) >>> # logits are of shape (batch_size, num_labels, height, width) >>> logits = outputs.logits ```""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) outputs = self.data2vec_vision( pixel_values, head_mask=head_mask, output_attentions=output_attentions, output_hidden_states=True, # we need the intermediate hidden states return_dict=return_dict, ) encoder_hidden_states = outputs.hidden_states if return_dict else outputs[1] # only keep certain features, and reshape # note that we do +1 as the encoder_hidden_states also includes the initial embeddings features = [feature for idx, feature in enumerate(encoder_hidden_states) if idx + 1 in self.config.out_indices] batch_size = shape_list(pixel_values)[0] patch_resolution = self.config.image_size // self.config.patch_size def reshape_features(x): x = tf.reshape(x, (batch_size, patch_resolution, patch_resolution, -1)) return x features = [reshape_features(x[:, 1:, :]) for x in features] # apply FPNs ops = [self.fpn1, self.fpn2, self.fpn3, self.fpn4] for module in ops[0]: features[0] = module(features[0]) features[1] = ops[1][0](features[1]) for i in range(len(features[2:])): features[i + 2] = ops[i + 2](features[i + 2]) logits = self.decode_head(features) # Tranpose the logits to maintain consistency in 
the output formats. transposed_logits = tf.transpose(logits, perm=[0, 3, 1, 2]) auxiliary_logits = None if self.auxiliary_head is not None: auxiliary_logits = self.auxiliary_head(features) loss = None if labels is not None: if self.config.num_labels == 1: raise ValueError("The number of labels should be greater than one") else: loss = self.compute_loss(logits, auxiliary_logits, labels) if not return_dict: if output_hidden_states: output = (logits,) + outputs[1:] else: output = (logits,) + outputs[2:] return ((loss,) + output) if loss is not None else output return TFSemanticSegmenterOutput( loss=loss, logits=transposed_logits, hidden_states=outputs.hidden_states if output_hidden_states else None, attentions=outputs.attentions, ) def serving_output(self, output: TFSemanticSegmenterOutput) -> TFSemanticSegmenterOutput: hidden_states = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attentions = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFSemanticSegmenterOutput(logits=output.logits, hidden_states=hidden_states, attentions=attentions)
27182812/ChatGLM-LLaMA-chinese-insturct
16,245
src/transformers/models/data2vec/configuration_data2vec_audio.py
# coding=utf-8 # Copyright 2022 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Data2VecText configuration""" import math from ...configuration_utils import PretrainedConfig from ...utils import logging logger = logging.get_logger(__name__) DATA2VEC_AUDIO_PRETRAINED_CONFIG_ARCHIVE_MAP = { "facebook/data2vec-base-960h": "https://huggingface.co/facebook/data2vec-audio-base-960h/resolve/main/config.json", # See all Data2VecAudio models at https://huggingface.co/models?filter=data2vec-audio } class Data2VecAudioConfig(PretrainedConfig): r""" This is the configuration class to store the configuration of a [`Data2VecAudioModel`]. It is used to instantiate an Data2VecAudio model according to the specified arguments, defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of the Data2VecAudio [facebook/data2vec-audio-base-960h](https://huggingface.co/facebook/data2vec-audio-base-960h) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Args: vocab_size (`int`, *optional*, defaults to 32): Vocabulary size of the Data2VecAudio model. Defines the number of different tokens that can be represented by the `inputs_ids` passed when calling [`Data2VecAudioModel`] or [`TFData2VecAudioModel`]. Vocabulary size of the model. 
Defines the different tokens that can be represented by the *inputs_ids* passed to the forward method of [`Data2VecAudioModel`]. hidden_size (`int`, *optional*, defaults to 768): Dimensionality of the encoder layers and the pooler layer. num_hidden_layers (`int`, *optional*, defaults to 12): Number of hidden layers in the Transformer encoder. num_attention_heads (`int`, *optional*, defaults to 12): Number of attention heads for each attention layer in the Transformer encoder. intermediate_size (`int`, *optional*, defaults to 3072): Dimensionality of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder. hidden_act (`str` or `function`, *optional*, defaults to `"gelu"`): The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`, `"relu"`, `"selu"` and `"gelu_new"` are supported. hidden_dropout (`float`, *optional*, defaults to 0.1): The dropout probability for all fully connected layers in the embeddings, encoder, and pooler. attention_dropout (`float`, *optional*, defaults to 0.1): The dropout ratio for the attention probabilities. final_dropout (`float`, *optional*, defaults to 0.1): The dropout probability for the final projection layer of [`Data2VecAudioForCTC`]. initializer_range (`float`, *optional*, defaults to 0.02): The standard deviation of the truncated_normal_initializer for initializing all weight matrices. layer_norm_eps (`float`, *optional*, defaults to 1e-12): The epsilon used by the layer normalization layers. feat_proj_dropout (`float`, *optional*, defaults to 0.0): The dropout probability for output of the feature encoder. feat_extract_activation (`str, `optional`, defaults to `"gelu"`): The non-linear activation function (function or string) in the 1D convolutional layers of the feature extractor. If string, `"gelu"`, `"relu"`, `"selu"` and `"gelu_new"` are supported. 
conv_dim (`Tuple[int]` or `List[int]`, *optional*, defaults to `(512, 512, 512, 512, 512, 512, 512)`): A tuple of integers defining the number of input and output channels of each 1D convolutional layer in the feature encoder. The length of *conv_dim* defines the number of 1D convolutional layers. conv_stride (`Tuple[int]` or `List[int]`, *optional*, defaults to `(5, 2, 2, 2, 2, 2, 2)`): A tuple of integers defining the stride of each 1D convolutional layer in the feature encoder. The length of *conv_stride* defines the number of convolutional layers and has to match the length of *conv_dim*. conv_kernel (`Tuple[int]` or `List[int]`, *optional*, defaults to `(10, 3, 3, 3, 3, 3, 3)`): A tuple of integers defining the kernel size of each 1D convolutional layer in the feature encoder. The length of *conv_kernel* defines the number of convolutional layers and has to match the length of *conv_dim*. conv_bias (`bool`, *optional*, defaults to `False`): Whether the 1D convolutional layers have a bias. num_conv_pos_embeddings (`int`, *optional*, defaults to 128): Number of convolutional positional embeddings. Defines the kernel size of 1D convolutional positional embeddings layer. num_conv_pos_embedding_groups (`int`, *optional*, defaults to 16): Number of groups of 1D convolutional positional embeddings layer. mask_time_prob (`float`, *optional*, defaults to 0.05): Percentage (between 0 and 1) of all feature vectors along the time axis which will be masked. The masking procecure generates ''mask_time_prob*len(time_axis)/mask_time_length'' independent masks over the axis. If reasoning from the propability of each feature vector to be chosen as the start of the vector span to be masked, *mask_time_prob* should be `prob_vector_start*mask_time_length`. Note that overlap may decrease the mask_time_length (`int`, *optional*, defaults to 10): Length of vector span along the time axis. 
mask_time_min_masks (`int`, *optional*, defaults to 2),: The minimum number of masks of length `mask_feature_length` generated along the time axis, each time step, irrespectively of `mask_feature_prob`. Only relevant if ''mask_time_prob*len(time_axis)/mask_time_length < mask_time_min_masks'' mask_feature_prob (`float`, *optional*, defaults to 0.0): Percentage (between 0 and 1) of all feature vectors along the feature axis which will be masked. The masking procecure generates ''mask_feature_prob*len(feature_axis)/mask_time_length'' independent masks over the axis. If reasoning from the propability of each feature vector to be chosen as the start of the vector span to be masked, *mask_feature_prob* should be `prob_vector_start*mask_feature_length`. Note that overlap may decrease the actual percentage of masked vectors. This is only relevant if `apply_spec_augment is True`. mask_feature_length (`int`, *optional*, defaults to 10): Length of vector span along the feature axis. mask_feature_min_masks (`int`, *optional*, defaults to 0),: The minimum number of masks of length `mask_feature_length` generated along the feature axis, each time step, irrespectively of `mask_feature_prob`. Only relevant if ''mask_feature_prob*len(feature_axis)/mask_feature_length < mask_feature_min_masks'' ctc_loss_reduction (`str`, *optional*, defaults to `"sum"`): Specifies the reduction to apply to the output of `torch.nn.CTCLoss`. Only relevant when training an instance of [`Data2VecAudioForCTC`]. ctc_zero_infinity (`bool`, *optional*, defaults to `False`): Whether to zero infinite losses and the associated gradients of `torch.nn.CTCLoss`. Infinite losses mainly occur when the inputs are too short to be aligned to the targets. Only relevant when training an instance of [`Data2VecAudioForCTC`]. use_weighted_layer_sum (`bool`, *optional*, defaults to `False`): Whether to use a weighted average of layer outputs with learned weights. 
Only relevant when using an instance of [`Data2VecAudioForSequenceClassification`]. classifier_proj_size (`int`, *optional*, defaults to 256): Dimensionality of the projection before token mean-pooling for classification. tdnn_dim (`Tuple[int]` or `List[int]`, *optional*, defaults to `(512, 512, 512, 512, 1500)`): A tuple of integers defining the number of output channels of each 1D convolutional layer in the *TDNN* module of the *XVector* model. The length of *tdnn_dim* defines the number of *TDNN* layers. tdnn_kernel (`Tuple[int]` or `List[int]`, *optional*, defaults to `(5, 3, 3, 1, 1)`): A tuple of integers defining the kernel size of each 1D convolutional layer in the *TDNN* module of the *XVector* model. The length of *tdnn_kernel* has to match the length of *tdnn_dim*. tdnn_dilation (`Tuple[int]` or `List[int]`, *optional*, defaults to `(1, 2, 3, 1, 1)`): A tuple of integers defining the dilation factor of each 1D convolutional layer in *TDNN* module of the *XVector* model. The length of *tdnn_dilation* has to match the length of *tdnn_dim*. xvector_output_dim (`int`, *optional*, defaults to 512): Dimensionality of the *XVector* embedding vectors. add_adapter (`bool`, *optional*, defaults to `False`): Whether a convolutional network should be stacked on top of the Data2VecAudio Encoder. Can be very useful for warm-starting Data2VecAudio for SpeechEncoderDecoder models. adapter_kernel_size (`int`, *optional*, defaults to 3): Kernel size of the convolutional layers in the adapter network. Only relevant if `add_adapter is True`. adapter_stride (`int`, *optional*, defaults to 2): Stride of the convolutional layers in the adapter network. Only relevant if `add_adapter is True`. num_adapter_layers (`int`, *optional*, defaults to 3): Number of convolutional layers that should be used in the adapter network. Only relevant if `add_adapter is True`. output_hidden_size (`int`, *optional*): Dimensionality of the encoder output layer. 
If not defined, this defaults to *hidden-size*. Only relevant if `add_adapter is True`. Example: ```python >>> from transformers import Data2VecAudioConfig, Data2VecAudioModel >>> # Initializing a Data2VecAudio facebook/data2vec-audio-base-960h style configuration >>> configuration = Data2VecAudioConfig() >>> # Initializing a model (with random weights) from the facebook/data2vec-audio-base-960h style configuration >>> model = Data2VecAudioModel(configuration) >>> # Accessing the model configuration >>> configuration = model.config ```""" model_type = "data2vec-audio" def __init__( self, vocab_size=32, hidden_size=768, num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072, hidden_act="gelu", hidden_dropout=0.1, activation_dropout=0.1, attention_dropout=0.1, feat_proj_dropout=0.0, final_dropout=0.1, layerdrop=0.1, initializer_range=0.02, layer_norm_eps=1e-5, feat_extract_activation="gelu", conv_dim=(512, 512, 512, 512, 512, 512, 512), conv_stride=(5, 2, 2, 2, 2, 2, 2), conv_kernel=(10, 3, 3, 3, 3, 2, 2), conv_bias=False, num_conv_pos_embedding_groups=16, conv_pos_kernel_size=19, num_conv_pos_embeddings=5, mask_time_prob=0.05, mask_time_length=10, mask_time_min_masks=2, mask_feature_prob=0.0, mask_feature_length=10, mask_feature_min_masks=0, ctc_loss_reduction="sum", ctc_zero_infinity=False, use_weighted_layer_sum=False, classifier_proj_size=256, tdnn_dim=(512, 512, 512, 512, 1500), tdnn_kernel=(5, 3, 3, 1, 1), tdnn_dilation=(1, 2, 3, 1, 1), xvector_output_dim=512, pad_token_id=0, bos_token_id=1, eos_token_id=2, add_adapter=False, adapter_kernel_size=3, adapter_stride=2, num_adapter_layers=3, output_hidden_size=None, **kwargs, ): super().__init__(**kwargs, pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id) self.hidden_size = hidden_size self.feat_extract_activation = feat_extract_activation self.conv_dim = list(conv_dim) self.conv_stride = list(conv_stride) self.conv_kernel = list(conv_kernel) self.conv_bias = conv_bias 
self.num_conv_pos_embeddings = num_conv_pos_embeddings self.num_conv_pos_embedding_groups = num_conv_pos_embedding_groups self.conv_pos_kernel_size = conv_pos_kernel_size self.num_feat_extract_layers = len(self.conv_dim) self.num_hidden_layers = num_hidden_layers self.intermediate_size = intermediate_size self.hidden_act = hidden_act self.num_attention_heads = num_attention_heads self.hidden_dropout = hidden_dropout self.attention_dropout = attention_dropout self.activation_dropout = activation_dropout self.feat_proj_dropout = feat_proj_dropout self.final_dropout = final_dropout self.layerdrop = layerdrop self.layer_norm_eps = layer_norm_eps self.initializer_range = initializer_range self.vocab_size = vocab_size self.use_weighted_layer_sum = use_weighted_layer_sum if ( (len(self.conv_stride) != self.num_feat_extract_layers) or (len(self.conv_kernel) != self.num_feat_extract_layers) or (len(self.conv_dim) != self.num_feat_extract_layers) ): raise ValueError( "Configuration for convolutional layers is incorrect. It is required that `len(config.conv_dim)` ==" " `len(config.conv_stride)` == `len(config.conv_kernel)`, but is `len(config.conv_dim) =" f" {len(self.conv_dim)}`, `len(config.conv_stride) = {len(self.conv_stride)}`," f" `len(config.conv_kernel) = {len(self.conv_kernel)}`." 
) # fine-tuning config parameters for SpecAugment: https://arxiv.org/abs/1904.08779 self.mask_time_prob = mask_time_prob self.mask_time_length = mask_time_length self.mask_time_min_masks = mask_time_min_masks self.mask_feature_prob = mask_feature_prob self.mask_feature_length = mask_feature_length self.mask_feature_min_masks = mask_feature_min_masks # ctc loss self.ctc_loss_reduction = ctc_loss_reduction self.ctc_zero_infinity = ctc_zero_infinity # adapter self.add_adapter = add_adapter self.adapter_kernel_size = adapter_kernel_size self.adapter_stride = adapter_stride self.num_adapter_layers = num_adapter_layers self.output_hidden_size = output_hidden_size or hidden_size # SequenceClassification-specific parameter. Feel free to ignore for other classes. self.classifier_proj_size = classifier_proj_size # XVector-specific parameters. Feel free to ignore for other classes. self.tdnn_dim = list(tdnn_dim) self.tdnn_kernel = list(tdnn_kernel) self.tdnn_dilation = list(tdnn_dilation) self.xvector_output_dim = xvector_output_dim @property def inputs_to_logits_ratio(self): return math.prod(self.conv_stride)
27182812/ChatGLM-LLaMA-chinese-insturct
7,420
src/transformers/models/data2vec/configuration_data2vec_text.py
# coding=utf-8 # Copyright 2022 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Data2VecText configuration""" from collections import OrderedDict from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging logger = logging.get_logger(__name__) DATA2VEC_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP = { "facebook/data2vec-text-base": "https://huggingface.co/data2vec/resolve/main/config.json", } class Data2VecTextConfig(PretrainedConfig): r""" This is the configuration class to store the configuration of a [`Data2VecTextModel`] and [`Data2VecTextModel`]. It is used to instantiate a Data2VecText model according to the specified arguments, defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of the Data2VecText [facebook/data2vec-text-base](https://huggingface.co/facebook/data2vec-text-base) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Args: vocab_size (`int`, *optional*, defaults to 30522): Vocabulary size of the DATA2VEC model. Defines the number of different tokens that can be represented by the `inputs_ids` passed when calling [`Data2VecModel`]. hidden_size (`int`, *optional*, defaults to 768): Dimensionality of the encoder layers and the pooler layer. 
num_hidden_layers (`int`, *optional*, defaults to 12): Number of hidden layers in the Transformer encoder. num_attention_heads (`int`, *optional*, defaults to 12): Number of attention heads for each attention layer in the Transformer encoder. intermediate_size (`int`, *optional*, defaults to 3072): Dimensionality of the "intermediate" (often named feed-forward) layer in the Transformer encoder. hidden_act (`str` or `Callable`, *optional*, defaults to `"gelu"`): The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`, `"relu"`, `"silu"` and `"gelu_new"` are supported. hidden_dropout_prob (`float`, *optional*, defaults to 0.1): The dropout probability for all fully connected layers in the embeddings, encoder, and pooler. attention_probs_dropout_prob (`float`, *optional*, defaults to 0.1): The dropout ratio for the attention probabilities. max_position_embeddings (`int`, *optional*, defaults to 512): The maximum sequence length that this model might ever be used with. Typically set this to something large just in case (e.g., 512 or 1024 or 2048). type_vocab_size (`int`, *optional*, defaults to 2): The vocabulary size of the `token_type_ids` passed when calling [`Data2VecModel`]. initializer_range (`float`, *optional*, defaults to 0.02): The standard deviation of the truncated_normal_initializer for initializing all weight matrices. layer_norm_eps (`float`, *optional*, defaults to 1e-12): The epsilon used by the layer normalization layers. position_embedding_type (`str`, *optional*, defaults to `"absolute"`): Type of position embedding. Choose one of `"absolute"`, `"relative_key"`, `"relative_key_query"`. For positional embeddings use `"absolute"`. For more information on `"relative_key"`, please refer to [Self-Attention with Relative Position Representations (Shaw et al.)](https://arxiv.org/abs/1803.02155). 
For more information on `"relative_key_query"`, please refer to *Method 4* in [Improve Transformer Models with Better Relative Position Embeddings (Huang et al.)](https://arxiv.org/abs/2009.13658). is_decoder (`bool`, *optional*, defaults to `False`): Whether the model is used as a decoder or not. If `False`, the model is used as an encoder. use_cache (`bool`, *optional*, defaults to `True`): Whether or not the model should return the last key/values attentions (not used by all models). Only relevant if `config.is_decoder=True`. classifier_dropout (`float`, *optional*): The dropout ratio for the classification head. Examples: ```python >>> from transformers import Data2VecTextConfig, Data2VecTextModel >>> # Initializing a Data2VecText facebook/data2vec-text-base style configuration >>> configuration = Data2VecTextConfig() >>> # Initializing a model (with random weights) from the facebook/data2vec-text-base style configuration >>> model = Data2VecTextModel(configuration) >>> # Accessing the model configuration >>> configuration = model.config ```""" model_type = "data2vec-text" def __init__( self, vocab_size=30522, hidden_size=768, num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072, hidden_act="gelu", hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=512, type_vocab_size=2, initializer_range=0.02, layer_norm_eps=1e-12, pad_token_id=1, bos_token_id=0, eos_token_id=2, position_embedding_type="absolute", use_cache=True, classifier_dropout=None, **kwargs, ): super().__init__(pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id, **kwargs) self.vocab_size = vocab_size self.hidden_size = hidden_size self.num_hidden_layers = num_hidden_layers self.num_attention_heads = num_attention_heads self.hidden_act = hidden_act self.intermediate_size = intermediate_size self.hidden_dropout_prob = hidden_dropout_prob self.attention_probs_dropout_prob = attention_probs_dropout_prob self.max_position_embeddings 
= max_position_embeddings self.type_vocab_size = type_vocab_size self.initializer_range = initializer_range self.layer_norm_eps = layer_norm_eps self.position_embedding_type = position_embedding_type self.use_cache = use_cache self.classifier_dropout = classifier_dropout class Data2VecTextOnnxConfig(OnnxConfig): @property def inputs(self) -> Mapping[str, Mapping[int, str]]: if self.task == "multiple-choice": dynamic_axis = {0: "batch", 1: "choice", 2: "sequence"} else: dynamic_axis = {0: "batch", 1: "sequence"} return OrderedDict( [ ("input_ids", dynamic_axis), ("attention_mask", dynamic_axis), ] )
27182812/ChatGLM-LLaMA-chinese-insturct
72,323
src/transformers/models/data2vec/modeling_data2vec_text.py
# coding=utf-8 # Copyright 2022 The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """PyTorch Data2VecText model.""" import math from typing import List, Optional, Tuple, Union import torch import torch.utils.checkpoint from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACT2FN, gelu from ...modeling_outputs import ( BaseModelOutputWithPastAndCrossAttentions, BaseModelOutputWithPoolingAndCrossAttentions, CausalLMOutputWithCrossAttentions, MaskedLMOutput, MultipleChoiceModelOutput, QuestionAnsweringModelOutput, SequenceClassifierOutput, TokenClassifierOutput, ) from ...modeling_utils import PreTrainedModel from ...pytorch_utils import apply_chunking_to_forward, find_pruneable_heads_and_indices, prune_linear_layer from ...utils import ( add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_data2vec_text import Data2VecTextConfig logger = logging.get_logger(__name__) _HIDDEN_STATES_START_POSITION = 2 # General docstring _CHECKPOINT_FOR_DOC = "facebook/data2vec-text-base" _CONFIG_FOR_DOC = "Data2VecTextConfig" DATA2VEC_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST = [ "facebook/data2vec-text-base", # See all data2vec models at https://huggingface.co/models?filter=data2vec-text ] # Copied from transformers.models.roberta.modeling_roberta.RobertaEmbeddings with Roberta->Data2VecText class 
Data2VecTextForTextEmbeddings(nn.Module): """ Same as BertEmbeddings with a tiny tweak for positional embeddings indexing. """ # Copied from transformers.models.bert.modeling_bert.BertEmbeddings.__init__ def __init__(self, config): super().__init__() self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id) self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size) self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size) # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load # any TensorFlow checkpoint file self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) self.dropout = nn.Dropout(config.hidden_dropout_prob) # position_ids (1, len position emb) is contiguous in memory and exported when serialized self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1))) self.register_buffer( "token_type_ids", torch.zeros(self.position_ids.size(), dtype=torch.long), persistent=False ) # End copy self.padding_idx = config.pad_token_id self.position_embeddings = nn.Embedding( config.max_position_embeddings, config.hidden_size, padding_idx=self.padding_idx ) def forward( self, input_ids=None, token_type_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0 ): if position_ids is None: if input_ids is not None: # Create the position ids from the input token ids. Any padded tokens remain padded. 
position_ids = create_position_ids_from_input_ids(input_ids, self.padding_idx, past_key_values_length) else: position_ids = self.create_position_ids_from_inputs_embeds(inputs_embeds) if input_ids is not None: input_shape = input_ids.size() else: input_shape = inputs_embeds.size()[:-1] seq_length = input_shape[1] # Setting the token_type_ids to the registered buffer in constructor where it is all zeros, which usually occurs # when its auto-generated, registered buffer helps users when tracing the model without passing token_type_ids, solves # issue #5664 if token_type_ids is None: if hasattr(self, "token_type_ids"): buffered_token_type_ids = self.token_type_ids[:, :seq_length] buffered_token_type_ids_expanded = buffered_token_type_ids.expand(input_shape[0], seq_length) token_type_ids = buffered_token_type_ids_expanded else: token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=self.position_ids.device) if inputs_embeds is None: inputs_embeds = self.word_embeddings(input_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = inputs_embeds + token_type_embeddings if self.position_embedding_type == "absolute": position_embeddings = self.position_embeddings(position_ids) embeddings += position_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings def create_position_ids_from_inputs_embeds(self, inputs_embeds): """ We are provided embeddings directly. We cannot infer which are padded so just generate sequential position ids. 
Args: inputs_embeds: torch.Tensor Returns: torch.Tensor """ input_shape = inputs_embeds.size()[:-1] sequence_length = input_shape[1] position_ids = torch.arange( self.padding_idx + 1, sequence_length + self.padding_idx + 1, dtype=torch.long, device=inputs_embeds.device ) return position_ids.unsqueeze(0).expand(input_shape) # Copied from transformers.models.roberta.modeling_roberta.RobertaSelfAttention with Roberta->Data2VecText class Data2VecTextSelfAttention(nn.Module): def __init__(self, config, position_embedding_type=None): super().__init__() if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"): raise ValueError( f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention " f"heads ({config.num_attention_heads})" ) self.num_attention_heads = config.num_attention_heads self.attention_head_size = int(config.hidden_size / config.num_attention_heads) self.all_head_size = self.num_attention_heads * self.attention_head_size self.query = nn.Linear(config.hidden_size, self.all_head_size) self.key = nn.Linear(config.hidden_size, self.all_head_size) self.value = nn.Linear(config.hidden_size, self.all_head_size) self.dropout = nn.Dropout(config.attention_probs_dropout_prob) self.position_embedding_type = position_embedding_type or getattr( config, "position_embedding_type", "absolute" ) if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query": self.max_position_embeddings = config.max_position_embeddings self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size) self.is_decoder = config.is_decoder def transpose_for_scores(self, x: torch.Tensor) -> torch.Tensor: new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) x = x.view(new_x_shape) return x.permute(0, 2, 1, 3) def forward( self, hidden_states: torch.Tensor, attention_mask: Optional[torch.FloatTensor] = None, head_mask: 
Optional[torch.FloatTensor] = None, encoder_hidden_states: Optional[torch.FloatTensor] = None, encoder_attention_mask: Optional[torch.FloatTensor] = None, past_key_value: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, output_attentions: Optional[bool] = False, ) -> Tuple[torch.Tensor]: mixed_query_layer = self.query(hidden_states) # If this is instantiated as a cross-attention module, the keys # and values come from an encoder; the attention mask needs to be # such that the encoder's padding tokens are not attended to. is_cross_attention = encoder_hidden_states is not None if is_cross_attention and past_key_value is not None: # reuse k,v, cross_attentions key_layer = past_key_value[0] value_layer = past_key_value[1] attention_mask = encoder_attention_mask elif is_cross_attention: key_layer = self.transpose_for_scores(self.key(encoder_hidden_states)) value_layer = self.transpose_for_scores(self.value(encoder_hidden_states)) attention_mask = encoder_attention_mask elif past_key_value is not None: key_layer = self.transpose_for_scores(self.key(hidden_states)) value_layer = self.transpose_for_scores(self.value(hidden_states)) key_layer = torch.cat([past_key_value[0], key_layer], dim=2) value_layer = torch.cat([past_key_value[1], value_layer], dim=2) else: key_layer = self.transpose_for_scores(self.key(hidden_states)) value_layer = self.transpose_for_scores(self.value(hidden_states)) query_layer = self.transpose_for_scores(mixed_query_layer) use_cache = past_key_value is not None if self.is_decoder: # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states. # Further calls to cross_attention layer can then reuse all cross-attention # key/value_states (first "if" case) # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of # all previous decoder key/value_states. 
Further calls to uni-directional self-attention # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) # if encoder bi-directional self-attention `past_key_value` is always `None` past_key_value = (key_layer, value_layer) # Take the dot product between "query" and "key" to get the raw attention scores. attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2)) if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query": query_length, key_length = query_layer.shape[2], key_layer.shape[2] if use_cache: position_ids_l = torch.tensor(key_length - 1, dtype=torch.long, device=hidden_states.device).view( -1, 1 ) else: position_ids_l = torch.arange(query_length, dtype=torch.long, device=hidden_states.device).view(-1, 1) position_ids_r = torch.arange(key_length, dtype=torch.long, device=hidden_states.device).view(1, -1) distance = position_ids_l - position_ids_r positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1) positional_embedding = positional_embedding.to(dtype=query_layer.dtype) # fp16 compatibility if self.position_embedding_type == "relative_key": relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding) attention_scores = attention_scores + relative_position_scores elif self.position_embedding_type == "relative_key_query": relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding) relative_position_scores_key = torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding) attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key attention_scores = attention_scores / math.sqrt(self.attention_head_size) if attention_mask is not None: # Apply the attention mask is (precomputed for all layers in Data2VecTextModel forward() function) attention_scores = attention_scores + attention_mask # Normalize the 
attention scores to probabilities. attention_probs = nn.functional.softmax(attention_scores, dim=-1) # This is actually dropping out entire tokens to attend to, which might # seem a bit unusual, but is taken from the original Transformer paper. attention_probs = self.dropout(attention_probs) # Mask heads if we want to if head_mask is not None: attention_probs = attention_probs * head_mask context_layer = torch.matmul(attention_probs, value_layer) context_layer = context_layer.permute(0, 2, 1, 3).contiguous() new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,) context_layer = context_layer.view(new_context_layer_shape) outputs = (context_layer, attention_probs) if output_attentions else (context_layer,) if self.is_decoder: outputs = outputs + (past_key_value,) return outputs # Copied from transformers.models.bert.modeling_bert.BertSelfOutput class Data2VecTextSelfOutput(nn.Module): def __init__(self, config): super().__init__() self.dense = nn.Linear(config.hidden_size, config.hidden_size) self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) self.dropout = nn.Dropout(config.hidden_dropout_prob) def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor: hidden_states = self.dense(hidden_states) hidden_states = self.dropout(hidden_states) hidden_states = self.LayerNorm(hidden_states + input_tensor) return hidden_states # Copied from transformers.models.bert.modeling_bert.BertAttention with Bert->Data2VecText class Data2VecTextAttention(nn.Module): def __init__(self, config, position_embedding_type=None): super().__init__() self.self = Data2VecTextSelfAttention(config, position_embedding_type=position_embedding_type) self.output = Data2VecTextSelfOutput(config) self.pruned_heads = set() def prune_heads(self, heads): if len(heads) == 0: return heads, index = find_pruneable_heads_and_indices( heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads ) # Prune 
linear layers self.self.query = prune_linear_layer(self.self.query, index) self.self.key = prune_linear_layer(self.self.key, index) self.self.value = prune_linear_layer(self.self.value, index) self.output.dense = prune_linear_layer(self.output.dense, index, dim=1) # Update hyper params and store pruned heads self.self.num_attention_heads = self.self.num_attention_heads - len(heads) self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads self.pruned_heads = self.pruned_heads.union(heads) def forward( self, hidden_states: torch.Tensor, attention_mask: Optional[torch.FloatTensor] = None, head_mask: Optional[torch.FloatTensor] = None, encoder_hidden_states: Optional[torch.FloatTensor] = None, encoder_attention_mask: Optional[torch.FloatTensor] = None, past_key_value: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, output_attentions: Optional[bool] = False, ) -> Tuple[torch.Tensor]: self_outputs = self.self( hidden_states, attention_mask, head_mask, encoder_hidden_states, encoder_attention_mask, past_key_value, output_attentions, ) attention_output = self.output(self_outputs[0], hidden_states) outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them return outputs # Copied from transformers.models.bert.modeling_bert.BertIntermediate class Data2VecTextIntermediate(nn.Module): def __init__(self, config): super().__init__() self.dense = nn.Linear(config.hidden_size, config.intermediate_size) if isinstance(config.hidden_act, str): self.intermediate_act_fn = ACT2FN[config.hidden_act] else: self.intermediate_act_fn = config.hidden_act def forward(self, hidden_states: torch.Tensor) -> torch.Tensor: hidden_states = self.dense(hidden_states) hidden_states = self.intermediate_act_fn(hidden_states) return hidden_states # Copied from transformers.models.bert.modeling_bert.BertOutput class Data2VecTextOutput(nn.Module): def __init__(self, config): super().__init__() self.dense = nn.Linear(config.intermediate_size, 
config.hidden_size) self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) self.dropout = nn.Dropout(config.hidden_dropout_prob) def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor: hidden_states = self.dense(hidden_states) hidden_states = self.dropout(hidden_states) hidden_states = self.LayerNorm(hidden_states + input_tensor) return hidden_states # Copied from transformers.models.bert.modeling_bert.BertLayer with Bert->Data2VecText class Data2VecTextLayer(nn.Module): def __init__(self, config): super().__init__() self.chunk_size_feed_forward = config.chunk_size_feed_forward self.seq_len_dim = 1 self.attention = Data2VecTextAttention(config) self.is_decoder = config.is_decoder self.add_cross_attention = config.add_cross_attention if self.add_cross_attention: if not self.is_decoder: raise ValueError(f"{self} should be used as a decoder model if cross attention is added") self.crossattention = Data2VecTextAttention(config, position_embedding_type="absolute") self.intermediate = Data2VecTextIntermediate(config) self.output = Data2VecTextOutput(config) def forward( self, hidden_states: torch.Tensor, attention_mask: Optional[torch.FloatTensor] = None, head_mask: Optional[torch.FloatTensor] = None, encoder_hidden_states: Optional[torch.FloatTensor] = None, encoder_attention_mask: Optional[torch.FloatTensor] = None, past_key_value: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, output_attentions: Optional[bool] = False, ) -> Tuple[torch.Tensor]: # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None self_attention_outputs = self.attention( hidden_states, attention_mask, head_mask, output_attentions=output_attentions, past_key_value=self_attn_past_key_value, ) attention_output = self_attention_outputs[0] # if decoder, the last output is tuple of self-attn cache if self.is_decoder: outputs = 
self_attention_outputs[1:-1] present_key_value = self_attention_outputs[-1] else: outputs = self_attention_outputs[1:] # add self attentions if we output attention weights cross_attn_present_key_value = None if self.is_decoder and encoder_hidden_states is not None: if not hasattr(self, "crossattention"): raise ValueError( f"If `encoder_hidden_states` are passed, {self} has to be instantiated with cross-attention layers" " by setting `config.add_cross_attention=True`" ) # cross_attn cached key/values tuple is at positions 3,4 of past_key_value tuple cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None cross_attention_outputs = self.crossattention( attention_output, attention_mask, head_mask, encoder_hidden_states, encoder_attention_mask, cross_attn_past_key_value, output_attentions, ) attention_output = cross_attention_outputs[0] outputs = outputs + cross_attention_outputs[1:-1] # add cross attentions if we output attention weights # add cross-attn cache to positions 3,4 of present_key_value tuple cross_attn_present_key_value = cross_attention_outputs[-1] present_key_value = present_key_value + cross_attn_present_key_value layer_output = apply_chunking_to_forward( self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output ) outputs = (layer_output,) + outputs # if decoder, return the attn key/values as the last output if self.is_decoder: outputs = outputs + (present_key_value,) return outputs def feed_forward_chunk(self, attention_output): intermediate_output = self.intermediate(attention_output) layer_output = self.output(intermediate_output, attention_output) return layer_output # Copied from transformers.models.bert.modeling_bert.BertEncoder with Bert->Data2VecText class Data2VecTextEncoder(nn.Module): def __init__(self, config): super().__init__() self.config = config self.layer = nn.ModuleList([Data2VecTextLayer(config) for _ in range(config.num_hidden_layers)]) self.gradient_checkpointing = 
False def forward( self, hidden_states: torch.Tensor, attention_mask: Optional[torch.FloatTensor] = None, head_mask: Optional[torch.FloatTensor] = None, encoder_hidden_states: Optional[torch.FloatTensor] = None, encoder_attention_mask: Optional[torch.FloatTensor] = None, past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = False, output_hidden_states: Optional[bool] = False, return_dict: Optional[bool] = True, ) -> Union[Tuple[torch.Tensor], BaseModelOutputWithPastAndCrossAttentions]: all_hidden_states = () if output_hidden_states else None all_self_attentions = () if output_attentions else None all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None next_decoder_cache = () if use_cache else None for i, layer_module in enumerate(self.layer): if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) layer_head_mask = head_mask[i] if head_mask is not None else None past_key_value = past_key_values[i] if past_key_values is not None else None if self.gradient_checkpointing and self.training: if use_cache: logger.warning_once( "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
) use_cache = False def create_custom_forward(module): def custom_forward(*inputs): return module(*inputs, past_key_value, output_attentions) return custom_forward layer_outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(layer_module), hidden_states, attention_mask, layer_head_mask, encoder_hidden_states, encoder_attention_mask, ) else: layer_outputs = layer_module( hidden_states, attention_mask, layer_head_mask, encoder_hidden_states, encoder_attention_mask, past_key_value, output_attentions, ) hidden_states = layer_outputs[0] if use_cache: next_decoder_cache += (layer_outputs[-1],) if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) if self.config.add_cross_attention: all_cross_attentions = all_cross_attentions + (layer_outputs[2],) if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not return_dict: return tuple( v for v in [ hidden_states, next_decoder_cache, all_hidden_states, all_self_attentions, all_cross_attentions, ] if v is not None ) return BaseModelOutputWithPastAndCrossAttentions( last_hidden_state=hidden_states, past_key_values=next_decoder_cache, hidden_states=all_hidden_states, attentions=all_self_attentions, cross_attentions=all_cross_attentions, ) # Copied from transformers.models.bert.modeling_bert.BertPooler class Data2VecTextPooler(nn.Module): def __init__(self, config): super().__init__() self.dense = nn.Linear(config.hidden_size, config.hidden_size) self.activation = nn.Tanh() def forward(self, hidden_states: torch.Tensor) -> torch.Tensor: # We "pool" the model by simply taking the hidden state corresponding # to the first token. first_token_tensor = hidden_states[:, 0] pooled_output = self.dense(first_token_tensor) pooled_output = self.activation(pooled_output) return pooled_output class Data2VecTextPreTrainedModel(PreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. 
""" config_class = Data2VecTextConfig base_model_prefix = "data2vec_text" supports_gradient_checkpointing = True _no_split_modules = [] def _init_weights(self, module): """Initialize the weights""" if isinstance(module, nn.Linear): # Slightly different from the TF version which uses truncated_normal for initialization # cf https://github.com/pytorch/pytorch/pull/5617 module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.bias is not None: module.bias.data.zero_() elif isinstance(module, nn.Embedding): module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.padding_idx is not None: module.weight.data[module.padding_idx].zero_() elif isinstance(module, nn.LayerNorm): if hasattr(module, "bias") and module.bias is not None: module.bias.data.zero_() if hasattr(module, "weight") and module.weight is not None: module.weight.data.fill_(1.0) def _set_gradient_checkpointing(self, module, value=False): if isinstance(module, Data2VecTextEncoder): module.gradient_checkpointing = value def update_keys_to_ignore(self, config, del_keys_to_ignore): """Remove some keys from ignore list""" if not config.tie_word_embeddings: # must make a new list, or the class variable gets modified! self._keys_to_ignore_on_save = [k for k in self._keys_to_ignore_on_save if k not in del_keys_to_ignore] self._keys_to_ignore_on_load_missing = [ k for k in self._keys_to_ignore_on_load_missing if k not in del_keys_to_ignore ] DATA2VECTEXT_START_DOCSTRING = r""" Data2VecText was proposed in [data2vec: A General Framework for Self-supervised Learning in Speech, Vision and Language](https://arxiv.org/pdf/2202.03555) by Alexei Baevski, Wei-Ning Hsu, Qiantong Xu, Arun Babu, Jiatao Gu and Michael Auli. This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.) 

    This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
    and behavior.

    Parameters:
        config ([`Data2VecTextConfig`]): Model configuration class with all the parameters of the model. Initializing
            with a config file does not load the weights associated with the model, only the configuration. Check out
            the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""

DATA2VECTEXT_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (`torch.LongTensor` of shape `({0})`):
            Indices of input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        token_type_ids (`torch.LongTensor` of shape `({0})`, *optional*):
            Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
            1]`:

            - 0 corresponds to a *sentence A* token,
            - 1 corresponds to a *sentence B* token.

            [What are token type IDs?](../glossary#token-type-ids)
        position_ids (`torch.LongTensor` of shape `({0})`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
            config.max_position_embeddings - 1]`.

            [What are position IDs?](../glossary#position-ids)
        head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.

        inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_size)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
            model's internal embedding lookup matrix.
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""


@add_start_docstrings(
    "The bare Data2VecText Model for text transformer outputting raw hidden-states without any specific head on top.",
    DATA2VECTEXT_START_DOCSTRING,
)
class Data2VecTextModel(Data2VecTextPreTrainedModel):
    """

    The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of
    cross-attention is added between the self-attention layers, following the architecture described in *Attention is
    all you need*_ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan N. Gomez, Lukasz
    Kaiser and Illia Polosukhin.

    To behave as a decoder, the model needs to be initialized with the `is_decoder` argument of the configuration set
    to `True`. To be used in a Seq2Seq model, the model needs to be initialized with both `is_decoder` argument and
    `add_cross_attention` set to `True`; an `encoder_hidden_states` is then expected as an input to the forward pass.

    ..
    _*Attention is all you need*: https://arxiv.org/abs/1706.03762

    """

    _keys_to_ignore_on_load_missing = [r"position_ids"]

    def __init__(self, config, add_pooling_layer=True):
        super().__init__(config)
        self.config = config

        self.embeddings = Data2VecTextForTextEmbeddings(config)
        self.encoder = Data2VecTextEncoder(config)

        # The pooler is optional so headless task models can skip its parameters entirely.
        self.pooler = Data2VecTextPooler(config) if add_pooling_layer else None

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.embeddings.word_embeddings

    def set_input_embeddings(self, value):
        self.embeddings.word_embeddings = value

    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @add_start_docstrings_to_model_forward(DATA2VECTEXT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=BaseModelOutputWithPoolingAndCrossAttentions,
        config_class=_CONFIG_FOR_DOC,
    )
    # Copied from transformers.models.bert.modeling_bert.BertModel.forward
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        encoder_attention_mask: Optional[torch.Tensor] = None,
        past_key_values: Optional[List[torch.FloatTensor]] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple[torch.Tensor], BaseModelOutputWithPoolingAndCrossAttentions]:
        r"""
        encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
            the model is configured as a decoder.
        encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
            the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
        past_key_values (`tuple(tuple(torch.FloatTensor))` of length `config.n_layers` with each tuple having 4 tensors
        of shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
            Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.

            If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that
            don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all
            `decoder_input_ids` of shape `(batch_size, sequence_length)`.
        use_cache (`bool`, *optional*):
            If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
            `past_key_values`).
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # The KV cache only makes sense when the model is used as a decoder.
        if self.config.is_decoder:
            use_cache = use_cache if use_cache is not None else self.config.use_cache
        else:
            use_cache = False

        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = input_ids.size()
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")

        batch_size, seq_length = input_shape
        device = input_ids.device if input_ids is not None else inputs_embeds.device

        # past_key_values_length
        past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0

        if attention_mask is None:
            # Default mask must also cover the cached prefix positions.
            attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device)

        if token_type_ids is None:
            # Reuse the registered buffer when available so tracing/export keeps a constant dtype/device.
            if hasattr(self.embeddings, "token_type_ids"):
                buffered_token_type_ids = self.embeddings.token_type_ids[:, :seq_length]
                buffered_token_type_ids_expanded = buffered_token_type_ids.expand(batch_size, seq_length)
                token_type_ids = buffered_token_type_ids_expanded
            else:
                token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)

        # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
        # ourselves in which case we just need to make it broadcastable to all heads.
        extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape)

        # If a 2D or 3D attention mask is provided for the cross-attention
        # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length]
        if self.config.is_decoder and encoder_hidden_states is not None:
            encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
            encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
            if encoder_attention_mask is None:
                encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
            encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
        else:
            encoder_extended_attention_mask = None

        # Prepare head mask if needed
        # 1.0 in head_mask indicate we keep the head
        # attention_probs has shape bsz x n_heads x N x N
        # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
        # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        embedding_output = self.embeddings(
            input_ids=input_ids,
            position_ids=position_ids,
            token_type_ids=token_type_ids,
            inputs_embeds=inputs_embeds,
            past_key_values_length=past_key_values_length,
        )
        encoder_outputs = self.encoder(
            embedding_output,
            attention_mask=extended_attention_mask,
            head_mask=head_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_extended_attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoder_outputs[0]
        pooled_output = self.pooler(sequence_output) if self.pooler is not None else None

        if not return_dict:
            return (sequence_output, pooled_output) + encoder_outputs[1:]

        return BaseModelOutputWithPoolingAndCrossAttentions(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            past_key_values=encoder_outputs.past_key_values,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
            cross_attentions=encoder_outputs.cross_attentions,
        )


@add_start_docstrings(
    """Data2VecText Model with a `language modeling` head on top for CLM fine-tuning.""", DATA2VECTEXT_START_DOCSTRING
)
class Data2VecTextForCausalLM(Data2VecTextPreTrainedModel):
    _keys_to_ignore_on_save = [r"lm_head.decoder.weight", r"lm_head.decoder.bias"]
    _keys_to_ignore_on_load_missing = [r"position_ids", r"lm_head.decoder.weight", r"lm_head.decoder.bias"]
    _keys_to_ignore_on_load_unexpected = [r"pooler"]

    def __init__(self, config):
        super().__init__(config)

        if not config.is_decoder:
            logger.warning("If you want to use `Data2VecTextLMHeadModel` as a standalone, add `is_decoder=True.`")

        # No pooler: the LM head consumes per-token hidden states directly.
        self.data2vec_text = Data2VecTextModel(config, add_pooling_layer=False)
        self.lm_head = Data2VecTextLMHead(config)

        # The LM head weights require special treatment only when they are tied with the word embeddings
        self.update_keys_to_ignore(config, ["lm_head.decoder.weight"])

        # Initialize weights and apply final processing
        self.post_init()

    def get_output_embeddings(self):
        return self.lm_head.decoder

    def set_output_embeddings(self, new_embeddings):
        self.lm_head.decoder = new_embeddings

    @add_start_docstrings_to_model_forward(DATA2VECTEXT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @replace_return_docstrings(output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, CausalLMOutputWithCrossAttentions]:
        r"""
        encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
            the model is configured as a decoder.
        encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
            the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in
            `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are
            ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`
        past_key_values (`tuple(tuple(torch.FloatTensor))` of length `config.n_layers` with each tuple having 4 tensors
        of shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
            Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.

            If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that
            don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all
            `decoder_input_ids` of shape `(batch_size, sequence_length)`.
        use_cache (`bool`, *optional*):
            If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
            `past_key_values`).

        Returns:

        Example:

        ```python
        >>> from transformers import AutoTokenizer, Data2VecTextForCausalLM, Data2VecTextConfig
        >>> import torch

        >>> tokenizer = AutoTokenizer.from_pretrained("facebook/data2vec-text-base")
        >>> config = Data2VecTextConfig.from_pretrained("facebook/data2vec-text-base")
        >>> config.is_decoder = True
        >>> model = Data2VecTextForCausalLM.from_pretrained("facebook/data2vec-text-base", config=config)

        >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
        >>> outputs = model(**inputs)

        >>> prediction_logits = outputs.logits
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        if labels is not None:
            # Training with labels never reuses the cache.
            use_cache = False

        outputs = self.data2vec_text(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = outputs[0]
        prediction_scores = self.lm_head(sequence_output)

        lm_loss = None
        if labels is not None:
            # we are doing next-token prediction; shift prediction scores and input ids by one
            shifted_prediction_scores = prediction_scores[:, :-1, :].contiguous()
            labels = labels[:, 1:].contiguous()
            loss_fct = CrossEntropyLoss()
            lm_loss = loss_fct(shifted_prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))

        if not return_dict:
            output = (prediction_scores,) + outputs[2:]
            return ((lm_loss,) + output) if lm_loss is not None else output

        return CausalLMOutputWithCrossAttentions(
            loss=lm_loss,
            logits=prediction_scores,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
            cross_attentions=outputs.cross_attentions,
        )

    def prepare_inputs_for_generation(self, input_ids, past_key_values=None, attention_mask=None, **model_kwargs):
        input_shape = input_ids.shape
        # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly
        if attention_mask is None:
            attention_mask = input_ids.new_ones(input_shape)

        # cut decoder_input_ids if past is used
        if past_key_values is not None:
            input_ids = input_ids[:, -1:]

        return {"input_ids": input_ids, "attention_mask": attention_mask, "past_key_values": past_key_values}

    def _reorder_cache(self, past_key_values, beam_idx):
        # Re-align each layer's cached states with the surviving beams after beam search reordering.
        reordered_past = ()
        for layer_past in past_key_values:
            reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),)
        return reordered_past


@add_start_docstrings("""data2vec Model with a `language modeling` head on top.""", DATA2VECTEXT_START_DOCSTRING)
class Data2VecTextForMaskedLM(Data2VecTextPreTrainedModel):
    _keys_to_ignore_on_save = [r"lm_head.decoder.weight", r"lm_head.decoder.bias"]
    _keys_to_ignore_on_load_missing = [r"position_ids", r"lm_head.decoder.weight", r"lm_head.decoder.bias"]
    _keys_to_ignore_on_load_unexpected = [r"pooler"]

    def __init__(self, config):
        super().__init__(config)

        if config.is_decoder:
            logger.warning(
                "If you want to use `Data2VecTextForMaskedLM` make sure `config.is_decoder=False` for "
                "bi-directional self-attention."
            )

        # Headless backbone; the MLM head projects hidden states back to the vocabulary.
        self.data2vec_text = Data2VecTextModel(config, add_pooling_layer=False)
        self.lm_head = Data2VecTextLMHead(config)

        # The LM head weights require special treatment only when they are tied with the word embeddings
        self.update_keys_to_ignore(config, ["lm_head.decoder.weight"])

        # Initialize weights and apply final processing
        self.post_init()

    def get_output_embeddings(self):
        return self.lm_head.decoder

    def set_output_embeddings(self, new_embeddings):
        self.lm_head.decoder = new_embeddings

    @add_start_docstrings_to_model_forward(DATA2VECTEXT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=MaskedLMOutput,
        config_class=_CONFIG_FOR_DOC,
        mask="<mask>",
    )
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, MaskedLMOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should be in `[-100, 0, ...,
            config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the
            loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`
        kwargs (`Dict[str, any]`, optional, defaults to *{}*):
            Used to hide legacy arguments that have been deprecated.
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.data2vec_text( input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = outputs[0] prediction_scores = self.lm_head(sequence_output) masked_lm_loss = None if labels is not None: loss_fct = CrossEntropyLoss() masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), labels.view(-1)) if not return_dict: output = (prediction_scores,) + outputs[2:] return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output return MaskedLMOutput( loss=masked_lm_loss, logits=prediction_scores, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) # Copied from transformers.models.roberta.modeling_roberta.RobertaLMHead with Roberta->Data2VecText class Data2VecTextLMHead(nn.Module): """Data2VecText Head for masked language modeling.""" def __init__(self, config): super().__init__() self.dense = nn.Linear(config.hidden_size, config.hidden_size) self.layer_norm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) self.decoder = nn.Linear(config.hidden_size, config.vocab_size) self.bias = nn.Parameter(torch.zeros(config.vocab_size)) self.decoder.bias = self.bias def forward(self, features, **kwargs): x = self.dense(features) x = gelu(x) x = self.layer_norm(x) # project back to size of vocabulary with bias x = self.decoder(x) return x def _tie_weights(self): # To tie those two weights if they get disconnected (on TPU or when the bias is resized) # For accelerate compatibility and to not break backward compatibility if self.decoder.bias.device.type == "meta": self.decoder.bias = self.bias else: self.bias = self.decoder.bias 
@add_start_docstrings(
    """
    Data2VecText Model transformer with a sequence classification/regression head on top (a linear layer on top of the
    pooled output) e.g. for GLUE tasks.
    """,
    DATA2VECTEXT_START_DOCSTRING,
)
class Data2VecTextForSequenceClassification(Data2VecTextPreTrainedModel):
    _keys_to_ignore_on_load_missing = [r"position_ids"]

    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.config = config

        self.data2vec_text = Data2VecTextModel(config, add_pooling_layer=False)
        self.classifier = Data2VecTextClassificationHead(config)

        # Initialize weights and apply final processing
        self.post_init()

    @add_start_docstrings_to_model_forward(DATA2VECTEXT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=SequenceClassifierOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, SequenceClassifierOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.data2vec_text(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = outputs[0]
        logits = self.classifier(sequence_output)

        loss = None
        if labels is not None:
            # Infer the problem type once from num_labels and the labels' dtype, then cache it on the config.
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output

        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


@add_start_docstrings(
    """
    Data2VecText Model with a multiple choice classification head on top (a linear layer on top of the pooled output
    and a softmax) e.g. for RocStories/SWAG tasks.
""", DATA2VECTEXT_START_DOCSTRING, ) class Data2VecTextForMultipleChoice(Data2VecTextPreTrainedModel): _keys_to_ignore_on_load_missing = [r"position_ids"] def __init__(self, config): super().__init__(config) self.data2vec_text = Data2VecTextModel(config) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, 1) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward( DATA2VECTEXT_INPUTS_DOCSTRING.format("batch_size, num_choices, sequence_length") ) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=MultipleChoiceModelOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, position_ids: Optional[torch.LongTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, MultipleChoiceModelOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the multiple choice classification loss. Indices should be in `[0, ..., num_choices-1]` where `num_choices` is the size of the second dimension of the input tensors. 
(See `input_ids` above) """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict num_choices = input_ids.shape[1] if input_ids is not None else inputs_embeds.shape[1] flat_input_ids = input_ids.view(-1, input_ids.size(-1)) if input_ids is not None else None flat_position_ids = position_ids.view(-1, position_ids.size(-1)) if position_ids is not None else None flat_token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) if token_type_ids is not None else None flat_attention_mask = attention_mask.view(-1, attention_mask.size(-1)) if attention_mask is not None else None flat_inputs_embeds = ( inputs_embeds.view(-1, inputs_embeds.size(-2), inputs_embeds.size(-1)) if inputs_embeds is not None else None ) outputs = self.data2vec_text( flat_input_ids, position_ids=flat_position_ids, token_type_ids=flat_token_type_ids, attention_mask=flat_attention_mask, head_mask=head_mask, inputs_embeds=flat_inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) reshaped_logits = logits.view(-1, num_choices) loss = None if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(reshaped_logits, labels) if not return_dict: output = (reshaped_logits,) + outputs[2:] return ((loss,) + output) if loss is not None else output return MultipleChoiceModelOutput( loss=loss, logits=reshaped_logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) @add_start_docstrings( """ Data2VecText Model with a token classification head on top (a linear layer on top of the hidden-states output) e.g. for Named-Entity-Recognition (NER) tasks. 
""", DATA2VECTEXT_START_DOCSTRING, ) class Data2VecTextForTokenClassification(Data2VecTextPreTrainedModel): _keys_to_ignore_on_load_unexpected = [r"pooler"] _keys_to_ignore_on_load_missing = [r"position_ids"] def __init__(self, config): super().__init__(config) self.num_labels = config.num_labels self.data2vec_text = Data2VecTextModel(config, add_pooling_layer=False) classifier_dropout = ( config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob ) self.dropout = nn.Dropout(classifier_dropout) self.classifier = nn.Linear(config.hidden_size, config.num_labels) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(DATA2VECTEXT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TokenClassifierOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, position_ids: Optional[torch.LongTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, TokenClassifierOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the token classification loss. Indices should be in `[0, ..., config.num_labels - 1]`. 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.data2vec_text( input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = outputs[0] sequence_output = self.dropout(sequence_output) logits = self.classifier(sequence_output) loss = None if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output = (logits,) + outputs[2:] return ((loss,) + output) if loss is not None else output return TokenClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) # Copied from transformers.models.roberta.modeling_roberta.RobertaClassificationHead with Roberta->Data2VecText class Data2VecTextClassificationHead(nn.Module): """Head for sentence-level classification tasks.""" def __init__(self, config): super().__init__() self.dense = nn.Linear(config.hidden_size, config.hidden_size) classifier_dropout = ( config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob ) self.dropout = nn.Dropout(classifier_dropout) self.out_proj = nn.Linear(config.hidden_size, config.num_labels) def forward(self, features, **kwargs): x = features[:, 0, :] # take <s> token (equiv. to [CLS]) x = self.dropout(x) x = self.dense(x) x = torch.tanh(x) x = self.dropout(x) x = self.out_proj(x) return x @add_start_docstrings( """ Data2VecText Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear layers on top of the hidden-states output to compute `span start logits` and `span end logits`). 
""", DATA2VECTEXT_START_DOCSTRING, ) class Data2VecTextForQuestionAnswering(Data2VecTextPreTrainedModel): _keys_to_ignore_on_load_unexpected = [r"pooler"] _keys_to_ignore_on_load_missing = [r"position_ids"] def __init__(self, config): super().__init__(config) self.num_labels = config.num_labels self.data2vec_text = Data2VecTextModel(config, add_pooling_layer=False) self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(DATA2VECTEXT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=QuestionAnsweringModelOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, position_ids: Optional[torch.LongTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, start_positions: Optional[torch.LongTensor] = None, end_positions: Optional[torch.LongTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, QuestionAnsweringModelOutput]: r""" start_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the start of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence are not taken into account for computing the loss. end_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the end of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (`sequence_length`). 
Position outside of the sequence are not taken into account for computing the loss. """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.data2vec_text( input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = outputs[0] logits = self.qa_outputs(sequence_output) start_logits, end_logits = logits.split(1, dim=-1) start_logits = start_logits.squeeze(-1).contiguous() end_logits = end_logits.squeeze(-1).contiguous() total_loss = None if start_positions is not None and end_positions is not None: # If we are on multi-GPU, split add a dimension if len(start_positions.size()) > 1: start_positions = start_positions.squeeze(-1) if len(end_positions.size()) > 1: end_positions = end_positions.squeeze(-1) # sometimes the start/end positions are outside our model inputs, we ignore these terms ignored_index = start_logits.size(1) start_positions = start_positions.clamp(0, ignored_index) end_positions = end_positions.clamp(0, ignored_index) loss_fct = CrossEntropyLoss(ignore_index=ignored_index) start_loss = loss_fct(start_logits, start_positions) end_loss = loss_fct(end_logits, end_positions) total_loss = (start_loss + end_loss) / 2 if not return_dict: output = (start_logits, end_logits) + outputs[2:] return ((total_loss,) + output) if total_loss is not None else output return QuestionAnsweringModelOutput( loss=total_loss, start_logits=start_logits, end_logits=end_logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def create_position_ids_from_input_ids(input_ids, padding_idx, past_key_values_length=0): """ Replace non-padding symbols with their position numbers. Position numbers begin at padding_idx+1. Padding symbols are ignored. This is modified from fairseq's `utils.make_positions`. 
Args: x: torch.Tensor x: Returns: torch.Tensor """ # The series of casts and type-conversions here are carefully balanced to both work with ONNX export and XLA. mask = input_ids.ne(padding_idx).int() incremental_indices = (torch.cumsum(mask, dim=1).type_as(mask) + past_key_values_length) * mask return incremental_indices.long() + padding_idx
27182812/ChatGLM-LLaMA-chinese-insturct
65,798
src/transformers/models/data2vec/modeling_data2vec_audio.py
# coding=utf-8 # Copyright 2021 The Fairseq Authors and the HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch Data2VecAudio model.""" import math import warnings from typing import Optional, Tuple, Union import numpy as np import torch import torch.utils.checkpoint from torch import nn from torch.nn import CrossEntropyLoss from ...activations import ACT2FN from ...deepspeed import is_deepspeed_zero3_enabled from ...modeling_outputs import ( BaseModelOutput, CausalLMOutput, SequenceClassifierOutput, TokenClassifierOutput, Wav2Vec2BaseModelOutput, XVectorOutput, ) from ...modeling_utils import PreTrainedModel from ...pytorch_utils import torch_int_div from ...utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging from .configuration_data2vec_audio import Data2VecAudioConfig logger = logging.get_logger(__name__) _HIDDEN_STATES_START_POSITION = 2 # General docstring _CONFIG_FOR_DOC = "Data2VecAudioConfig" # Base docstring _CHECKPOINT_FOR_DOC = "facebook/data2vec-audio-base-960h" _EXPECTED_OUTPUT_SHAPE = [1, 292, 768] # CTC docstring _CTC_EXPECTED_OUTPUT = "'MISTER QUILTER IS THE APOSTLE OF THE MIDDLE CLASSES AND WE ARE GLAD TO WELCOME HIS GOSPEL'" _CTC_EXPECTED_LOSS = 66.95 DATA2VEC_AUDIO_PRETRAINED_MODEL_ARCHIVE_LIST = [ "facebook/data2vec-audio-base", "facebook/data2vec-audio-base-10m", "facebook/data2vec-audio-base-100h", "facebook/data2vec-audio-base-960h", # See all 
Data2VecAudio models at https://huggingface.co/models?filter=data2vec-audio ] # Copied from transformers.models.wav2vec2.modeling_wav2vec2._compute_mask_indices def _compute_mask_indices( shape: Tuple[int, int], mask_prob: float, mask_length: int, attention_mask: Optional[torch.LongTensor] = None, min_masks: int = 0, ) -> np.ndarray: """ Computes random mask spans for a given shape. Used to implement [SpecAugment: A Simple Data Augmentation Method for ASR](https://arxiv.org/abs/1904.08779). Note that this method is not optimized to run on TPU and should be run on CPU as part of the preprocessing during training. Args: shape: The shape for which to compute masks. This should be of a tuple of size 2 where the first element is the batch size and the second element is the length of the axis to span. mask_prob: The percentage of the whole axis (between 0 and 1) which will be masked. The number of independently generated mask spans of length `mask_length` is computed by `mask_prob*shape[1]/mask_length`. Note that due to overlaps, `mask_prob` is an upper bound and the actual percentage will be smaller. mask_length: size of the mask min_masks: minimum number of masked spans attention_mask: A (right-padded) attention mask which independently shortens the feature axis of each batch dimension. 
""" batch_size, sequence_length = shape if mask_length < 1: raise ValueError("`mask_length` has to be bigger than 0.") if mask_length > sequence_length: raise ValueError( f"`mask_length` has to be smaller than `sequence_length`, but got `mask_length`: {mask_length}" f" and `sequence_length`: {sequence_length}`" ) # epsilon is used for probabilistic rounding epsilon = np.random.rand(1).item() def compute_num_masked_span(input_length): """Given input length, compute how many spans should be masked""" num_masked_span = int(mask_prob * input_length / mask_length + epsilon) num_masked_span = max(num_masked_span, min_masks) # make sure num masked span <= sequence_length if num_masked_span * mask_length > sequence_length: num_masked_span = sequence_length // mask_length # make sure num_masked span is also <= input_length - (mask_length - 1) if input_length - (mask_length - 1) < num_masked_span: num_masked_span = max(input_length - (mask_length - 1), 0) return num_masked_span # compute number of masked spans in batch input_lengths = ( attention_mask.sum(-1).detach().tolist() if attention_mask is not None else [sequence_length for _ in range(batch_size)] ) # SpecAugment mask to fill spec_aug_mask = np.zeros((batch_size, sequence_length), dtype=bool) spec_aug_mask_idxs = [] max_num_masked_span = compute_num_masked_span(sequence_length) if max_num_masked_span == 0: return spec_aug_mask for input_length in input_lengths: # compute num of masked spans for this input num_masked_span = compute_num_masked_span(input_length) # get random indices to mask spec_aug_mask_idx = np.random.choice( np.arange(input_length - (mask_length - 1)), num_masked_span, replace=False ) # pick first sampled index that will serve as a dummy index to pad vector # to ensure same dimension for all batches due to probabilistic rounding # Picking first sample just pads those vectors twice. 
if len(spec_aug_mask_idx) == 0: # this case can only happen if `input_length` is strictly smaller then # `sequence_length` in which case the last token has to be a padding # token which we can use as a dummy mask id dummy_mask_idx = sequence_length - 1 else: dummy_mask_idx = spec_aug_mask_idx[0] spec_aug_mask_idx = np.concatenate( [spec_aug_mask_idx, np.ones(max_num_masked_span - num_masked_span, dtype=np.int32) * dummy_mask_idx] ) spec_aug_mask_idxs.append(spec_aug_mask_idx) spec_aug_mask_idxs = np.array(spec_aug_mask_idxs) # expand masked indices to masked spans spec_aug_mask_idxs = np.broadcast_to( spec_aug_mask_idxs[:, :, None], (batch_size, max_num_masked_span, mask_length) ) spec_aug_mask_idxs = spec_aug_mask_idxs.reshape(batch_size, max_num_masked_span * mask_length) # add offset to the starting indexes so that indexes now create a span offsets = np.arange(mask_length)[None, None, :] offsets = np.broadcast_to(offsets, (batch_size, max_num_masked_span, mask_length)).reshape( batch_size, max_num_masked_span * mask_length ) spec_aug_mask_idxs = spec_aug_mask_idxs + offsets # ensure that we cannot have indices larger than sequence_length if spec_aug_mask_idxs.max() > sequence_length - 1: spec_aug_mask_idxs[spec_aug_mask_idxs > sequence_length - 1] = sequence_length - 1 # scatter indices to mask np.put_along_axis(spec_aug_mask, spec_aug_mask_idxs, 1, -1) return spec_aug_mask class Data2VecAudioConvLayer(nn.Module): def __init__(self, config, layer_id=0): super().__init__() self.in_conv_dim = config.conv_dim[layer_id - 1] if layer_id > 0 else 1 self.out_conv_dim = config.conv_dim[layer_id] self.conv = nn.Conv1d( self.in_conv_dim, self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], stride=config.conv_stride[layer_id], bias=config.conv_bias, ) self.layer_norm = nn.LayerNorm(self.out_conv_dim, elementwise_affine=True) self.activation = ACT2FN[config.feat_extract_activation] def forward(self, hidden_states): hidden_states = self.conv(hidden_states) 
hidden_states = hidden_states.transpose(-2, -1) hidden_states = self.layer_norm(hidden_states) hidden_states = hidden_states.transpose(-2, -1) hidden_states = self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2SamePadLayer with Wav2Vec2->Data2VecAudio class Data2VecAudioPadLayer(nn.Module): def __init__(self, num_conv_pos_embeddings): super().__init__() self.num_pad_remove = 1 if num_conv_pos_embeddings % 2 == 0 else 0 def forward(self, hidden_states): if self.num_pad_remove > 0: hidden_states = hidden_states[:, :, : -self.num_pad_remove] return hidden_states class Data2VecAudioPositionalConvLayer(nn.Module): def __init__(self, config): super().__init__() self.conv = nn.Conv1d( config.hidden_size, config.hidden_size, kernel_size=config.conv_pos_kernel_size, padding=config.conv_pos_kernel_size // 2, groups=config.num_conv_pos_embedding_groups, ) self.padding = Data2VecAudioPadLayer(config.conv_pos_kernel_size) self.activation = ACT2FN[config.feat_extract_activation] # no learnable parameters self.layer_norm = nn.LayerNorm(config.hidden_size, elementwise_affine=False) def forward(self, hidden_states): hidden_states = self.conv(hidden_states) hidden_states = self.padding(hidden_states) hidden_states = hidden_states.transpose(1, 2) hidden_states = self.layer_norm(hidden_states) hidden_states = hidden_states.transpose(1, 2) hidden_states = self.activation(hidden_states) return hidden_states class Data2VecAudioPositionalConvEmbedding(nn.Module): def __init__(self, config): super().__init__() self.layers = nn.ModuleList( [Data2VecAudioPositionalConvLayer(config) for _ in range(config.num_conv_pos_embeddings)] ) def forward(self, hidden_states): hidden_states = hidden_states.transpose(1, 2) for layer in self.layers: hidden_states = layer(hidden_states) hidden_states = hidden_states.transpose(1, 2) return hidden_states class Data2VecAudioFeatureEncoder(nn.Module): """Construct the features from raw audio 
waveform""" def __init__(self, config): super().__init__() self.conv_layers = nn.ModuleList( [Data2VecAudioConvLayer(config, layer_id=i) for i in range(config.num_feat_extract_layers)] ) self.gradient_checkpointing = False self._requires_grad = True # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2FeatureEncoder._freeze_parameters def _freeze_parameters(self): for param in self.parameters(): param.requires_grad = False self._requires_grad = False # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2FeatureEncoder.forward def forward(self, input_values): hidden_states = input_values[:, None] # make sure hidden_states require grad for gradient_checkpointing if self._requires_grad and self.training: hidden_states.requires_grad = True for conv_layer in self.conv_layers: if self._requires_grad and self.gradient_checkpointing and self.training: def create_custom_forward(module): def custom_forward(*inputs): return module(*inputs) return custom_forward hidden_states = torch.utils.checkpoint.checkpoint( create_custom_forward(conv_layer), hidden_states, ) else: hidden_states = conv_layer(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2FeatureProjection with Wav2Vec2->Data2VecAudio class Data2VecAudioFeatureProjection(nn.Module): def __init__(self, config): super().__init__() self.layer_norm = nn.LayerNorm(config.conv_dim[-1], eps=config.layer_norm_eps) self.projection = nn.Linear(config.conv_dim[-1], config.hidden_size) self.dropout = nn.Dropout(config.feat_proj_dropout) def forward(self, hidden_states): # non-projected hidden states are needed for quantization norm_hidden_states = self.layer_norm(hidden_states) hidden_states = self.projection(norm_hidden_states) hidden_states = self.dropout(hidden_states) return hidden_states, norm_hidden_states # Copied from transformers.models.bart.modeling_bart.BartAttention with Bart->Data2VecAudio class Data2VecAudioAttention(nn.Module): 
"""Multi-headed attention from 'Attention Is All You Need' paper""" def __init__( self, embed_dim: int, num_heads: int, dropout: float = 0.0, is_decoder: bool = False, bias: bool = True, ): super().__init__() self.embed_dim = embed_dim self.num_heads = num_heads self.dropout = dropout self.head_dim = embed_dim // num_heads if (self.head_dim * num_heads) != self.embed_dim: raise ValueError( f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}" f" and `num_heads`: {num_heads})." ) self.scaling = self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias) def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous() def forward( self, hidden_states: torch.Tensor, key_value_states: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: """Input shape: Batch x Time x Channel""" # if key_value_states are provided this layer is used as a cross-attention layer # for the decoder is_cross_attention = key_value_states is not None bsz, tgt_len, _ = hidden_states.size() # get query proj query_states = self.q_proj(hidden_states) * self.scaling # get key, value proj # `past_key_value[0].shape[2] == key_value_states.shape[1]` # is checking that the `sequence_length` of the `past_key_value` is the same as # the provided `key_value_states` to support prefix tuning if ( is_cross_attention and past_key_value is not None and past_key_value[0].shape[2] == key_value_states.shape[1] ): # reuse k,v, cross_attentions 
key_states = past_key_value[0] value_states = past_key_value[1] elif is_cross_attention: # cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1, bsz) value_states = self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value is not None: # reuse k, v, self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) key_states = torch.cat([past_key_value[0], key_states], dim=2) value_states = torch.cat([past_key_value[1], value_states], dim=2) else: # self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) if self.is_decoder: # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states. # Further calls to cross_attention layer can then reuse all cross-attention # key/value_states (first "if" case) # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of # all previous decoder key/value_states. 
Further calls to uni-directional self-attention # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) # if encoder bi-directional self-attention `past_key_value` is always `None` past_key_value = (key_states, value_states) proj_shape = (bsz * self.num_heads, -1, self.head_dim) query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape) key_states = key_states.reshape(*proj_shape) value_states = value_states.reshape(*proj_shape) src_len = key_states.size(1) attn_weights = torch.bmm(query_states, key_states.transpose(1, 2)) if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len): raise ValueError( f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is" f" {attn_weights.size()}" ) if attention_mask is not None: if attention_mask.size() != (bsz, 1, tgt_len, src_len): raise ValueError( f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}" ) attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) attn_weights = nn.functional.softmax(attn_weights, dim=-1) if layer_head_mask is not None: if layer_head_mask.size() != (self.num_heads,): raise ValueError( f"Head mask for a single layer should be of size {(self.num_heads,)}, but is" f" {layer_head_mask.size()}" ) attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len) attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) if output_attentions: # this operation is a bit awkward, but it's required to # make sure that attn_weights keeps its gradient. 
# In order to do so, attn_weights have to be reshaped # twice and have to be reused in the following attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len) else: attn_weights_reshaped = None attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training) attn_output = torch.bmm(attn_probs, value_states) if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim): raise ValueError( f"`attn_output` should be of size {(bsz * self.num_heads, tgt_len, self.head_dim)}, but is" f" {attn_output.size()}" ) attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim) attn_output = attn_output.transpose(1, 2) # Use the `embed_dim` from the config (stored in the class) rather than `hidden_state` because `attn_output` can be # partitioned across GPUs when using tensor-parallelism. attn_output = attn_output.reshape(bsz, tgt_len, self.embed_dim) attn_output = self.out_proj(attn_output) return attn_output, attn_weights_reshaped, past_key_value # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2FeedForward with Wav2Vec2->Data2VecAudio class Data2VecAudioFeedForward(nn.Module): def __init__(self, config): super().__init__() self.intermediate_dropout = nn.Dropout(config.activation_dropout) self.intermediate_dense = nn.Linear(config.hidden_size, config.intermediate_size) if isinstance(config.hidden_act, str): self.intermediate_act_fn = ACT2FN[config.hidden_act] else: self.intermediate_act_fn = config.hidden_act self.output_dense = nn.Linear(config.intermediate_size, config.hidden_size) self.output_dropout = nn.Dropout(config.hidden_dropout) def forward(self, hidden_states): hidden_states = self.intermediate_dense(hidden_states) hidden_states = self.intermediate_act_fn(hidden_states) hidden_states = self.intermediate_dropout(hidden_states) hidden_states = self.output_dense(hidden_states) hidden_states = 
self.output_dropout(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2EncoderLayer with Wav2Vec2->Data2VecAudio class Data2VecAudioEncoderLayer(nn.Module): def __init__(self, config): super().__init__() self.attention = Data2VecAudioAttention( embed_dim=config.hidden_size, num_heads=config.num_attention_heads, dropout=config.attention_dropout, is_decoder=False, ) self.dropout = nn.Dropout(config.hidden_dropout) self.layer_norm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) self.feed_forward = Data2VecAudioFeedForward(config) self.final_layer_norm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) def forward(self, hidden_states, attention_mask=None, output_attentions=False): attn_residual = hidden_states hidden_states, attn_weights, _ = self.attention( hidden_states, attention_mask=attention_mask, output_attentions=output_attentions ) hidden_states = self.dropout(hidden_states) hidden_states = attn_residual + hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states = hidden_states + self.feed_forward(hidden_states) hidden_states = self.final_layer_norm(hidden_states) outputs = (hidden_states,) if output_attentions: outputs += (attn_weights,) return outputs # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2Encoder with Wav2Vec2->Data2VecAudio class Data2VecAudioEncoder(nn.Module): def __init__(self, config): super().__init__() self.config = config self.pos_conv_embed = Data2VecAudioPositionalConvEmbedding(config) self.layer_norm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) self.dropout = nn.Dropout(config.hidden_dropout) self.layers = nn.ModuleList([Data2VecAudioEncoderLayer(config) for _ in range(config.num_hidden_layers)]) self.gradient_checkpointing = False def forward( self, hidden_states: torch.tensor, attention_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, output_hidden_states: bool = False, return_dict: 
bool = True, ): all_hidden_states = () if output_hidden_states else None all_self_attentions = () if output_attentions else None if attention_mask is not None: # make sure padded tokens output 0 expand_attention_mask = attention_mask.unsqueeze(-1).repeat(1, 1, hidden_states.shape[2]) hidden_states[~expand_attention_mask] = 0 # extend attention_mask attention_mask = 1.0 - attention_mask[:, None, None, :].to(dtype=hidden_states.dtype) attention_mask = attention_mask * torch.finfo(hidden_states.dtype).min attention_mask = attention_mask.expand( attention_mask.shape[0], 1, attention_mask.shape[-1], attention_mask.shape[-1] ) position_embeddings = self.pos_conv_embed(hidden_states) hidden_states = hidden_states + position_embeddings hidden_states = self.layer_norm(hidden_states) hidden_states = self.dropout(hidden_states) deepspeed_zero3_is_enabled = is_deepspeed_zero3_enabled() for layer in self.layers: if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability = np.random.uniform(0, 1) skip_the_layer = True if self.training and (dropout_probability < self.config.layerdrop) else False if not skip_the_layer or deepspeed_zero3_is_enabled: # under deepspeed zero3 all gpus must run in sync if self.gradient_checkpointing and self.training: # create gradient checkpointing function def create_custom_forward(module): def custom_forward(*inputs): return module(*inputs, output_attentions) return custom_forward layer_outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(layer), hidden_states, attention_mask, ) else: layer_outputs = layer( hidden_states, attention_mask=attention_mask, output_attentions=output_attentions ) hidden_states = layer_outputs[0] if skip_the_layer: layer_outputs = (None, None) if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) if output_hidden_states: all_hidden_states = all_hidden_states + 
(hidden_states,) if not return_dict: return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if v is not None) return BaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2Adapter with Wav2Vec2->Data2VecAudio class Data2VecAudioAdapter(nn.Module): def __init__(self, config): super().__init__() # feature dim might need to be down-projected if config.output_hidden_size != config.hidden_size: self.proj = nn.Linear(config.hidden_size, config.output_hidden_size) self.proj_layer_norm = nn.LayerNorm(config.output_hidden_size) else: self.proj = self.proj_layer_norm = None self.layers = nn.ModuleList(Data2VecAudioAdapterLayer(config) for _ in range(config.num_adapter_layers)) self.layerdrop = config.layerdrop def forward(self, hidden_states): # down project hidden_states if necessary if self.proj is not None and self.proj_layer_norm is not None: hidden_states = self.proj(hidden_states) hidden_states = self.proj_layer_norm(hidden_states) hidden_states = hidden_states.transpose(1, 2) for layer in self.layers: layerdrop_prob = np.random.random() if not self.training or (layerdrop_prob > self.layerdrop): hidden_states = layer(hidden_states) hidden_states = hidden_states.transpose(1, 2) return hidden_states # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2AdapterLayer with Wav2Vec2->Data2VecAudio class Data2VecAudioAdapterLayer(nn.Module): def __init__(self, config): super().__init__() self.conv = nn.Conv1d( config.output_hidden_size, 2 * config.output_hidden_size, config.adapter_kernel_size, stride=config.adapter_stride, padding=1, ) def forward(self, hidden_states): hidden_states = self.conv(hidden_states) hidden_states = nn.functional.glu(hidden_states, dim=1) return hidden_states class Data2VecAudioPreTrainedModel(PreTrainedModel): """ An abstract class to handle weights initialization and a simple 
interface for downloading and loading pretrained models. """ config_class = Data2VecAudioConfig base_model_prefix = "data2vec_audio" main_input_name = "input_values" _keys_to_ignore_on_load_missing = [r"position_ids"] supports_gradient_checkpointing = True def _init_weights(self, module): """Initialize the weights""" if isinstance(module, Data2VecAudioFeatureProjection): k = math.sqrt(1 / module.projection.in_features) nn.init.uniform_(module.projection.weight, a=-k, b=k) nn.init.uniform_(module.projection.bias, a=-k, b=k) elif isinstance(module, Data2VecAudioPositionalConvLayer): nn.init.constant_(module.conv.bias, 0) elif isinstance(module, nn.Linear): module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.bias is not None: module.bias.data.zero_() elif isinstance(module, (nn.LayerNorm, nn.GroupNorm)): if module.bias is not None: module.bias.data.zero_() if module.weight is not None: module.weight.data.fill_(1.0) elif isinstance(module, nn.Conv1d): nn.init.kaiming_normal_(module.weight) if module.bias is not None: k = math.sqrt(module.groups / (module.in_channels * module.kernel_size[0])) nn.init.uniform_(module.bias, a=-k, b=k) # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2PreTrainedModel._get_feat_extract_output_lengths with def _get_feat_extract_output_lengths( self, input_lengths: Union[torch.LongTensor, int], add_adapter: Optional[bool] = None ): """ Computes the output length of the convolutional layers """ add_adapter = self.config.add_adapter if add_adapter is None else add_adapter def _conv_out_length(input_length, kernel_size, stride): # 1D convolutional layer output length formula taken # from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return torch_int_div(input_length - kernel_size, stride) + 1 for kernel_size, stride in zip(self.config.conv_kernel, self.config.conv_stride): input_lengths = _conv_out_length(input_lengths, kernel_size, stride) if add_adapter: for _ in 
range(self.config.num_adapter_layers): input_lengths = _conv_out_length(input_lengths, 1, self.config.adapter_stride) return input_lengths # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2PreTrainedModel._get_feature_vector_attention_mask def _get_feature_vector_attention_mask( self, feature_vector_length: int, attention_mask: torch.LongTensor, add_adapter=None ): # Effectively attention_mask.sum(-1), but not inplace to be able to run # on inference mode. non_padded_lengths = attention_mask.cumsum(dim=-1)[:, -1] output_lengths = self._get_feat_extract_output_lengths(non_padded_lengths, add_adapter=add_adapter) output_lengths = output_lengths.to(torch.long) batch_size = attention_mask.shape[0] attention_mask = torch.zeros( (batch_size, feature_vector_length), dtype=attention_mask.dtype, device=attention_mask.device ) # these two operations makes sure that all values before the output lengths idxs are attended to attention_mask[(torch.arange(attention_mask.shape[0], device=attention_mask.device), output_lengths - 1)] = 1 attention_mask = attention_mask.flip([-1]).cumsum(-1).flip([-1]).bool() return attention_mask def _set_gradient_checkpointing(self, module, value=False): if isinstance(module, (Data2VecAudioEncoder, Data2VecAudioFeatureEncoder)): module.gradient_checkpointing = value DATA2VEC_AUDIO_START_DOCSTRING = r""" Data2VecAudio was proposed in [data2vec: A General Framework for Self-supervised Learning in Speech, Vision and Language](https://arxiv.org/pdf/2202.03555) by Alexei Baevski, Wei-Ning Hsu, Qiantong Xu, Arun Babu, Jiatao Gu and Michael Auli. This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving etc.). This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) sub-class. 
Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`Data2VecAudioConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ DATA2VEC_AUDIO_INPUTS_DOCSTRING = r""" Args: input_values (`torch.FloatTensor` of shape `(batch_size, sequence_length)`): Float values of input raw speech waveform. Values can be obtained by loading a *.flac* or *.wav* audio file into an array of type *List[float]* or a *numpy.ndarray*, *e.g.* via the soundfile library (*pip install soundfile*). To prepare the array into *input_values*, the [`AutoProcessor`] should be used for padding and conversion into a tensor of type *torch.FloatTensor*. See [`Wav2Vec2Processor.__call__`] for details. attention_mask (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing convolution and attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) <Tip warning={true}> `attention_mask` should only be passed if the corresponding processor has `config.return_attention_mask == True`. For all models whose processor has `config.return_attention_mask == False`, such as [data2vec-audio-base](https://huggingface.co/facebook/data2vec-audio-base-960h), `attention_mask` should **not** be passed to avoid degraded performance when doing batched inference. For such models `input_values` should simply be padded with 0 and passed without `attention_mask`. Be aware that these models also yield slightly different results depending on whether `input_values` is padded or not. 
</Tip> output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ @add_start_docstrings( "The bare Data2VecAudio Model transformer outputting raw hidden-states without any specific head on top.", DATA2VEC_AUDIO_START_DOCSTRING, ) class Data2VecAudioModel(Data2VecAudioPreTrainedModel): def __init__(self, config: Data2VecAudioConfig): super().__init__(config) self.config = config self.feature_extractor = Data2VecAudioFeatureEncoder(config) self.feature_projection = Data2VecAudioFeatureProjection(config) # model only needs masking vector if mask prob is > 0.0 if config.mask_time_prob > 0.0 or config.mask_feature_prob > 0.0: self.masked_spec_embed = nn.Parameter(torch.FloatTensor(config.hidden_size).uniform_()) self.encoder = Data2VecAudioEncoder(config) self.adapter = Data2VecAudioAdapter(config) if config.add_adapter else None # Initialize weights and apply final processing self.post_init() def freeze_feature_encoder(self): """ Calling this function will disable the gradient computation for the feature encoder so that its parameter will not be updated during training. """ self.feature_extractor._freeze_parameters() def _mask_hidden_states( self, hidden_states: torch.FloatTensor, mask_time_indices: Optional[torch.FloatTensor] = None, attention_mask: Optional[torch.LongTensor] = None, ): """ Masks extracted features along time axis and/or along feature axis according to [SpecAugment](https://arxiv.org/abs/1904.08779). 
""" # `config.apply_spec_augment` can set masking to False if not getattr(self.config, "apply_spec_augment", True): return hidden_states # generate indices & apply SpecAugment along time axis batch_size, sequence_length, hidden_size = hidden_states.size() if mask_time_indices is not None: # apply SpecAugment along time axis with given mask_time_indices hidden_states[mask_time_indices] = self.masked_spec_embed.to(hidden_states.dtype) elif self.config.mask_time_prob > 0 and self.training: mask_time_indices = _compute_mask_indices( (batch_size, sequence_length), mask_prob=self.config.mask_time_prob, mask_length=self.config.mask_time_length, attention_mask=attention_mask, min_masks=self.config.mask_time_min_masks, ) mask_time_indices = torch.tensor(mask_time_indices, device=hidden_states.device, dtype=torch.bool) hidden_states[mask_time_indices] = self.masked_spec_embed.to(hidden_states.dtype) if self.config.mask_feature_prob > 0 and self.training: # generate indices & apply SpecAugment along feature axis mask_feature_indices = _compute_mask_indices( (batch_size, hidden_size), mask_prob=self.config.mask_feature_prob, mask_length=self.config.mask_feature_length, min_masks=self.config.mask_feature_min_masks, ) mask_feature_indices = torch.tensor(mask_feature_indices, device=hidden_states.device, dtype=torch.bool) mask_feature_indices = mask_feature_indices[:, None].expand(-1, sequence_length, -1) hidden_states[mask_feature_indices] = 0 return hidden_states @add_start_docstrings_to_model_forward(DATA2VEC_AUDIO_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=Wav2Vec2BaseModelOutput, config_class=_CONFIG_FOR_DOC, modality="audio", expected_output=_EXPECTED_OUTPUT_SHAPE, ) def forward( self, input_values: Optional[torch.Tensor], attention_mask: Optional[torch.Tensor] = None, mask_time_indices: Optional[torch.FloatTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: 
Optional[bool] = None, ) -> Union[Tuple, Wav2Vec2BaseModelOutput]: output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict extract_features = self.feature_extractor(input_values) extract_features = extract_features.transpose(1, 2) if attention_mask is not None: # compute reduced attention_mask corresponding to feature vectors attention_mask = self._get_feature_vector_attention_mask( extract_features.shape[1], attention_mask, add_adapter=False ) hidden_states, extract_features = self.feature_projection(extract_features) hidden_states = self._mask_hidden_states( hidden_states, mask_time_indices=mask_time_indices, attention_mask=attention_mask ) encoder_outputs = self.encoder( hidden_states, attention_mask=attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) hidden_states = encoder_outputs[0] if self.adapter is not None: hidden_states = self.adapter(hidden_states) if not return_dict: return (hidden_states, extract_features) + encoder_outputs[1:] return Wav2Vec2BaseModelOutput( last_hidden_state=hidden_states, extract_features=extract_features, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) @add_start_docstrings( """Data2VecAudio Model with a `language modeling` head on top for Connectionist Temporal Classification (CTC).""", DATA2VEC_AUDIO_START_DOCSTRING, ) class Data2VecAudioForCTC(Data2VecAudioPreTrainedModel): def __init__(self, config): super().__init__(config) self.data2vec_audio = Data2VecAudioModel(config) self.dropout = nn.Dropout(config.final_dropout) if config.vocab_size is None: raise ValueError( f"You are trying to instantiate {self.__class__} with a configuration that " "does not define the 
vocabulary size of the language model head. Please " "instantiate the model as follows: `Data2VecAudioForCTC.from_pretrained(..., vocab_size=vocab_size)`. " "or define `vocab_size` of your model's configuration." ) output_hidden_size = ( config.output_hidden_size if hasattr(config, "add_adapter") and config.add_adapter else config.hidden_size ) self.lm_head = nn.Linear(output_hidden_size, config.vocab_size) # Initialize weights and apply final processing self.post_init() def freeze_feature_extractor(self): """ Calling this function will disable the gradient computation for the feature encoder so that its parameter will not be updated during training. """ warnings.warn( "The method `freeze_feature_extractor` is deprecated and will be removed in Transformers v5." "Please use the equivalent `freeze_feature_encoder` method instead.", FutureWarning, ) self.freeze_feature_encoder() def freeze_feature_encoder(self): """ Calling this function will disable the gradient computation for the feature encoder so that its parameter will not be updated during training. """ self.data2vec_audio.feature_extractor._freeze_parameters() @add_start_docstrings_to_model_forward(DATA2VEC_AUDIO_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=CausalLMOutput, config_class=_CONFIG_FOR_DOC, expected_output=_CTC_EXPECTED_OUTPUT, expected_loss=_CTC_EXPECTED_LOSS, ) # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2ForCTC.forward with wav2vec2->data2vec_audio def forward( self, input_values: Optional[torch.Tensor], attention_mask: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, labels: Optional[torch.Tensor] = None, ) -> Union[Tuple, CausalLMOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size, target_length)`, *optional*): Labels for connectionist temporal classification. 
Note that `target_length` has to be smaller or equal to the sequence length of the output logits. Indices are selected in `[-100, 0, ..., config.vocab_size - 1]`. All labels set to `-100` are ignored (masked), the loss is only computed for labels in `[0, ..., config.vocab_size - 1]`. """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.data2vec_audio( input_values, attention_mask=attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) hidden_states = outputs[0] hidden_states = self.dropout(hidden_states) logits = self.lm_head(hidden_states) loss = None if labels is not None: if labels.max() >= self.config.vocab_size: raise ValueError(f"Label values must be <= vocab_size: {self.config.vocab_size}") # retrieve loss input_lengths from attention_mask attention_mask = ( attention_mask if attention_mask is not None else torch.ones_like(input_values, dtype=torch.long) ) input_lengths = self._get_feat_extract_output_lengths(attention_mask.sum(-1)).to(torch.long) # assuming that padded tokens are filled with -100 # when not being attended to labels_mask = labels >= 0 target_lengths = labels_mask.sum(-1) flattened_targets = labels.masked_select(labels_mask) # ctc_loss doesn't support fp16 log_probs = nn.functional.log_softmax(logits, dim=-1, dtype=torch.float32).transpose(0, 1) with torch.backends.cudnn.flags(enabled=False): loss = nn.functional.ctc_loss( log_probs, flattened_targets, input_lengths, target_lengths, blank=self.config.pad_token_id, reduction=self.config.ctc_loss_reduction, zero_infinity=self.config.ctc_zero_infinity, ) if not return_dict: output = (logits,) + outputs[_HIDDEN_STATES_START_POSITION:] return ((loss,) + output) if loss is not None else output return CausalLMOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions ) @add_start_docstrings( """ Data2VecAudio Model with a sequence 
classification head on top (a linear layer over the pooled output) for tasks like SUPERB Keyword Spotting. """, DATA2VEC_AUDIO_START_DOCSTRING, ) class Data2VecAudioForSequenceClassification(Data2VecAudioPreTrainedModel): def __init__(self, config): super().__init__(config) if hasattr(config, "add_adapter") and config.add_adapter: raise ValueError( "Sequence classification does not support the use of Data2VecAudio adapters (config.add_adapter=True)" ) self.data2vec_audio = Data2VecAudioModel(config) num_layers = config.num_hidden_layers + 1 # transformer layers + input embeddings if config.use_weighted_layer_sum: self.layer_weights = nn.Parameter(torch.ones(num_layers) / num_layers) self.projector = nn.Linear(config.hidden_size, config.classifier_proj_size) self.classifier = nn.Linear(config.classifier_proj_size, config.num_labels) # Initialize weights and apply final processing self.post_init() def freeze_feature_extractor(self): """ Calling this function will disable the gradient computation for the feature encoder so that its parameters will not be updated during training. """ warnings.warn( "The method `freeze_feature_extractor` is deprecated and will be removed in Transformers v5." "Please use the equivalent `freeze_feature_encoder` method instead.", FutureWarning, ) self.freeze_feature_encoder() def freeze_feature_encoder(self): """ Calling this function will disable the gradient computation for the feature encoder so that its parameter will not be updated during training. """ self.data2vec_audio.feature_extractor._freeze_parameters() def freeze_base_model(self): """ Calling this function will disable the gradient computation for the base model so that its parameters will not be updated during training. Only the classification head will be updated. 
""" for param in self.data2vec_audio.parameters(): param.requires_grad = False @add_start_docstrings_to_model_forward(DATA2VEC_AUDIO_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=SequenceClassifierOutput, config_class=_CONFIG_FOR_DOC, modality="audio", ) # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2ForSequenceClassification.forward with wav2vec2->data2vec_audio def forward( self, input_values: Optional[torch.Tensor], attention_mask: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, labels: Optional[torch.Tensor] = None, ) -> Union[Tuple, SequenceClassifierOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict output_hidden_states = True if self.config.use_weighted_layer_sum else output_hidden_states outputs = self.data2vec_audio( input_values, attention_mask=attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) if self.config.use_weighted_layer_sum: hidden_states = outputs[_HIDDEN_STATES_START_POSITION] hidden_states = torch.stack(hidden_states, dim=1) norm_weights = nn.functional.softmax(self.layer_weights, dim=-1) hidden_states = (hidden_states * norm_weights.view(-1, 1, 1)).sum(dim=1) else: hidden_states = outputs[0] hidden_states = self.projector(hidden_states) if attention_mask is None: pooled_output = hidden_states.mean(dim=1) else: padding_mask = self._get_feature_vector_attention_mask(hidden_states.shape[1], attention_mask) hidden_states[~padding_mask] = 0.0 pooled_output = hidden_states.sum(dim=1) / padding_mask.sum(dim=1).view(-1, 1) logits = self.classifier(pooled_output) loss = None if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.config.num_labels), labels.view(-1)) if not return_dict: output = (logits,) + outputs[_HIDDEN_STATES_START_POSITION:] return ((loss,) + output) if loss is not None else output return SequenceClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) @add_start_docstrings( """ Data2VecAudio Model with a frame classification head on top for tasks like Speaker Diarization. 
""", DATA2VEC_AUDIO_START_DOCSTRING, ) class Data2VecAudioForAudioFrameClassification(Data2VecAudioPreTrainedModel): def __init__(self, config): super().__init__(config) if hasattr(config, "add_adapter") and config.add_adapter: raise ValueError( "Audio frame classification does not support the use of Data2VecAudio adapters" " (config.add_adapter=True)" ) self.data2vec_audio = Data2VecAudioModel(config) num_layers = config.num_hidden_layers + 1 # transformer layers + input embeddings if config.use_weighted_layer_sum: self.layer_weights = nn.Parameter(torch.ones(num_layers) / num_layers) self.classifier = nn.Linear(config.hidden_size, config.num_labels) self.num_labels = config.num_labels self.init_weights() def freeze_feature_extractor(self): """ Calling this function will disable the gradient computation for the feature encoder so that its parameter will not be updated during training. """ warnings.warn( "The method `freeze_feature_extractor` is deprecated and will be removed in Transformers v5." "Please use the equivalent `freeze_feature_encoder` method instead.", FutureWarning, ) self.freeze_feature_encoder() def freeze_feature_encoder(self): """ Calling this function will disable the gradient computation for the feature encoder so that its parameter will not be updated during training. """ self.data2vec_audio.feature_extractor._freeze_parameters() def freeze_base_model(self): """ Calling this function will disable the gradient computation for the base model so that its parameters will not be updated during training. Only the classification head will be updated. 
""" for param in self.data2vec_audio.parameters(): param.requires_grad = False @add_start_docstrings_to_model_forward(DATA2VEC_AUDIO_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TokenClassifierOutput, config_class=_CONFIG_FOR_DOC, modality="audio", ) # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2ForAudioFrameClassification.forward with wav2vec2->data2vec_audio def forward( self, input_values: Optional[torch.Tensor], attention_mask: Optional[torch.Tensor] = None, labels: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, TokenClassifierOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict output_hidden_states = True if self.config.use_weighted_layer_sum else output_hidden_states outputs = self.data2vec_audio( input_values, attention_mask=attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) if self.config.use_weighted_layer_sum: hidden_states = outputs[_HIDDEN_STATES_START_POSITION] hidden_states = torch.stack(hidden_states, dim=1) norm_weights = nn.functional.softmax(self.layer_weights, dim=-1) hidden_states = (hidden_states * norm_weights.view(-1, 1, 1)).sum(dim=1) else: hidden_states = outputs[0] logits = self.classifier(hidden_states) loss = None if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), torch.argmax(labels.view(-1, self.num_labels), axis=1)) if not return_dict: output = (logits,) + outputs[_HIDDEN_STATES_START_POSITION:] return output return TokenClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) # Copied from transformers.models.wav2vec2.modeling_wav2vec2.AMSoftmaxLoss class AMSoftmaxLoss(nn.Module): def __init__(self, input_dim, num_labels, scale=30.0, margin=0.4): super(AMSoftmaxLoss, self).__init__() self.scale = scale self.margin = margin self.num_labels = num_labels self.weight = nn.Parameter(torch.randn(input_dim, num_labels), requires_grad=True) self.loss = nn.CrossEntropyLoss() def forward(self, hidden_states, labels): labels = labels.flatten() weight = nn.functional.normalize(self.weight, dim=0) hidden_states = nn.functional.normalize(hidden_states, dim=1) cos_theta = torch.mm(hidden_states, weight) psi = cos_theta - self.margin onehot = nn.functional.one_hot(labels, self.num_labels) logits = self.scale * torch.where(onehot.bool(), psi, cos_theta) loss = self.loss(logits, labels) return loss # Copied from 
transformers.models.wav2vec2.modeling_wav2vec2.TDNNLayer class TDNNLayer(nn.Module): def __init__(self, config, layer_id=0): super().__init__() self.in_conv_dim = config.tdnn_dim[layer_id - 1] if layer_id > 0 else config.tdnn_dim[layer_id] self.out_conv_dim = config.tdnn_dim[layer_id] self.kernel_size = config.tdnn_kernel[layer_id] self.dilation = config.tdnn_dilation[layer_id] self.kernel = nn.Linear(self.in_conv_dim * self.kernel_size, self.out_conv_dim) self.activation = nn.ReLU() def forward(self, hidden_states): hidden_states = hidden_states.unsqueeze(1) hidden_states = nn.functional.unfold( hidden_states, (self.kernel_size, self.in_conv_dim), stride=(1, self.in_conv_dim), dilation=(self.dilation, 1), ) hidden_states = hidden_states.transpose(1, 2) hidden_states = self.kernel(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states @add_start_docstrings( """ Data2VecAudio Model with an XVector feature extraction head on top for tasks like Speaker Verification. 
""", DATA2VEC_AUDIO_START_DOCSTRING, ) class Data2VecAudioForXVector(Data2VecAudioPreTrainedModel): def __init__(self, config): super().__init__(config) self.data2vec_audio = Data2VecAudioModel(config) num_layers = config.num_hidden_layers + 1 # transformer layers + input embeddings if config.use_weighted_layer_sum: self.layer_weights = nn.Parameter(torch.ones(num_layers) / num_layers) self.projector = nn.Linear(config.hidden_size, config.tdnn_dim[0]) tdnn_layers = [TDNNLayer(config, i) for i in range(len(config.tdnn_dim))] self.tdnn = nn.ModuleList(tdnn_layers) self.feature_extractor = nn.Linear(config.tdnn_dim[-1] * 2, config.xvector_output_dim) self.classifier = nn.Linear(config.xvector_output_dim, config.xvector_output_dim) self.objective = AMSoftmaxLoss(config.xvector_output_dim, config.num_labels) self.init_weights() def freeze_feature_extractor(self): """ Calling this function will disable the gradient computation for the feature encoder so that its parameter will not be updated during training. """ warnings.warn( "The method `freeze_feature_extractor` is deprecated and will be removed in Transformers v5." "Please use the equivalent `freeze_feature_encoder` method instead.", FutureWarning, ) self.freeze_feature_encoder() def freeze_feature_encoder(self): """ Calling this function will disable the gradient computation for the feature encoder so that its parameter will not be updated during training. """ self.data2vec_audio.feature_extractor._freeze_parameters() def freeze_base_model(self): """ Calling this function will disable the gradient computation for the base model so that its parameters will not be updated during training. Only the classification head will be updated. 
""" for param in self.data2vec_audio.parameters(): param.requires_grad = False def _get_tdnn_output_lengths(self, input_lengths: Union[torch.LongTensor, int]): """ Computes the output length of the TDNN layers """ def _conv_out_length(input_length, kernel_size, stride): # 1D convolutional layer output length formula taken # from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return (input_length - kernel_size) // stride + 1 for kernel_size in self.config.tdnn_kernel: input_lengths = _conv_out_length(input_lengths, kernel_size, 1) return input_lengths @add_start_docstrings_to_model_forward(DATA2VEC_AUDIO_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=XVectorOutput, config_class=_CONFIG_FOR_DOC, modality="audio", ) # Copied from transformers.models.wav2vec2.modeling_wav2vec2.Wav2Vec2ForXVector.forward with wav2vec2->data2vec_audio def forward( self, input_values: Optional[torch.Tensor], attention_mask: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, labels: Optional[torch.Tensor] = None, ) -> Union[Tuple, XVectorOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict output_hidden_states = True if self.config.use_weighted_layer_sum else output_hidden_states outputs = self.data2vec_audio( input_values, attention_mask=attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) if self.config.use_weighted_layer_sum: hidden_states = outputs[_HIDDEN_STATES_START_POSITION] hidden_states = torch.stack(hidden_states, dim=1) norm_weights = nn.functional.softmax(self.layer_weights, dim=-1) hidden_states = (hidden_states * norm_weights.view(-1, 1, 1)).sum(dim=1) else: hidden_states = outputs[0] hidden_states = self.projector(hidden_states) for tdnn_layer in self.tdnn: hidden_states = tdnn_layer(hidden_states) # Statistic Pooling if attention_mask is None: mean_features = hidden_states.mean(dim=1) std_features = hidden_states.std(dim=1) else: feat_extract_output_lengths = self._get_feat_extract_output_lengths(attention_mask.sum(dim=1)) tdnn_output_lengths = self._get_tdnn_output_lengths(feat_extract_output_lengths) mean_features = [] std_features = [] for i, length in enumerate(tdnn_output_lengths): mean_features.append(hidden_states[i, :length].mean(dim=0)) std_features.append(hidden_states[i, :length].std(dim=0)) mean_features = torch.stack(mean_features) std_features = torch.stack(std_features) statistic_pooling = torch.cat([mean_features, std_features], dim=-1) output_embeddings = self.feature_extractor(statistic_pooling) logits = self.classifier(output_embeddings) loss = None if labels is not None: loss = self.objective(logits, labels) if not return_dict: output = (logits, output_embeddings) + outputs[_HIDDEN_STATES_START_POSITION:] return ((loss,) + output) if loss is not None else output return XVectorOutput( loss=loss, logits=logits, embeddings=output_embeddings, hidden_states=outputs.hidden_states, attentions=outputs.attentions, )
27182812/ChatGLM-LLaMA-chinese-insturct
9,605
src/transformers/models/data2vec/convert_data2vec_text_original_pytorch_checkpoint_to_pytorch.py
# coding=utf-8 # Copyright 2022 The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Convert data2vec checkpoint.""" import argparse import os import pathlib import fairseq import torch from fairseq.modules import TransformerSentenceEncoderLayer from packaging import version from transformers import Data2VecTextConfig, Data2VecTextForMaskedLM, Data2VecTextForSequenceClassification from transformers.models.bert.modeling_bert import ( BertIntermediate, BertLayer, BertOutput, BertSelfAttention, BertSelfOutput, ) # IMPORTANT: In order for this script to run, please make sure to download the dictionary: `dict.txt` from wget https://dl.fbaipublicfiles.com/fairseq/models/roberta.large.tar.gz # File copied from https://github.com/pytorch/fairseq/blob/main/examples/data2vec/models/data2vec_text.py from transformers.models.data2vec.data2vec_text import Data2VecTextModel from transformers.utils import logging if version.parse(fairseq.__version__) < version.parse("0.9.0"): raise Exception("requires fairseq >= 0.9.0") logging.set_verbosity_info() logger = logging.get_logger(__name__) SAMPLE_TEXT = "Hello world! cécé herlolip" def convert_data2vec_checkpoint_to_pytorch( data2vec_checkpoint_path: str, pytorch_dump_folder_path: str, classification_head: bool ): """ Copy/paste/tweak data2vec's weights to our BERT structure. 
""" data2vec_checkpoint_dir, data2vec_checkpoint_file_name = os.path.split(data2vec_checkpoint_path) data2vec = Data2VecTextModel.from_pretrained( data2vec_checkpoint_dir, checkpoint_file=data2vec_checkpoint_file_name ) data2vec.eval() # disable dropout data2vec_model = data2vec.models[0] data2vec_sent_encoder = data2vec_model.encoder.sentence_encoder config = Data2VecTextConfig( vocab_size=data2vec_sent_encoder.embed_tokens.num_embeddings, hidden_size=data2vec_model.args.encoder_embed_dim, num_hidden_layers=data2vec_model.args.encoder_layers, num_attention_heads=data2vec_model.args.encoder_attention_heads, intermediate_size=data2vec_model.args.encoder_ffn_embed_dim, max_position_embeddings=514, type_vocab_size=1, layer_norm_eps=1e-5, # PyTorch default used in fairseq ) if classification_head: config.num_labels = data2vec.model.classification_heads["mnli"].out_proj.weight.shape[0] print("Our BERT config:", config) model = Data2VecTextForSequenceClassification(config) if classification_head else Data2VecTextForMaskedLM(config) model.eval() # Now let's copy all the weights. # Embeddings model.data2vec_text.embeddings.word_embeddings.weight = data2vec_sent_encoder.embed_tokens.weight model.data2vec_text.embeddings.position_embeddings.weight = data2vec_sent_encoder.embed_positions.weight model.data2vec_text.embeddings.token_type_embeddings.weight.data = torch.zeros_like( model.data2vec_text.embeddings.token_type_embeddings.weight ) # just zero them out b/c data2vec doesn't use them. 
model.data2vec_text.embeddings.LayerNorm.weight = data2vec_sent_encoder.layernorm_embedding.weight model.data2vec_text.embeddings.LayerNorm.bias = data2vec_sent_encoder.layernorm_embedding.bias for i in range(config.num_hidden_layers): # Encoder: start of layer layer: BertLayer = model.data2vec_text.encoder.layer[i] data2vec_layer: TransformerSentenceEncoderLayer = data2vec_sent_encoder.layers[i] # self attention self_attn: BertSelfAttention = layer.attention.self assert data2vec_layer.self_attn.k_proj.weight.data.shape == torch.Size( (config.hidden_size, config.hidden_size) ), ( "Shape for data2vec_layer.self_attn.k_proj.weight.data should be" f" {torch.Size((config.hidden_size, config.hidden_size))}" ) assert data2vec_layer.self_attn.q_proj.weight.data.shape == torch.Size( (config.hidden_size, config.hidden_size) ), ( "Shape for data2vec_layer.self_attn.q_proj.weight.data should be" f" {torch.Size((config.hidden_size, config.hidden_size))}" ) assert data2vec_layer.self_attn.v_proj.weight.data.shape == torch.Size( (config.hidden_size, config.hidden_size) ), ( "Shape for data2vec_layer.self_attn.v_proj.weight.data should be" f" {torch.Size((config.hidden_size, config.hidden_size))}" ) self_attn.query.weight.data = data2vec_layer.self_attn.q_proj.weight self_attn.query.bias.data = data2vec_layer.self_attn.q_proj.bias self_attn.key.weight.data = data2vec_layer.self_attn.k_proj.weight self_attn.key.bias.data = data2vec_layer.self_attn.k_proj.bias self_attn.value.weight.data = data2vec_layer.self_attn.v_proj.weight self_attn.value.bias.data = data2vec_layer.self_attn.v_proj.bias # self-attention output self_output: BertSelfOutput = layer.attention.output assert ( self_output.dense.weight.shape == data2vec_layer.self_attn.out_proj.weight.shape ), f"Shape for self_output.dense.weight should be {data2vec_layer.self_attn.out_proj.weight.shape}" self_output.dense.weight = data2vec_layer.self_attn.out_proj.weight self_output.dense.bias = 
data2vec_layer.self_attn.out_proj.bias self_output.LayerNorm.weight = data2vec_layer.self_attn_layer_norm.weight self_output.LayerNorm.bias = data2vec_layer.self_attn_layer_norm.bias # intermediate intermediate: BertIntermediate = layer.intermediate assert ( intermediate.dense.weight.shape == data2vec_layer.fc1.weight.shape ), f"Shape for intermediate.dense.weight should be {data2vec_layer.fc1.weight.shape}" intermediate.dense.weight = data2vec_layer.fc1.weight intermediate.dense.bias = data2vec_layer.fc1.bias # output bert_output: BertOutput = layer.output assert ( bert_output.dense.weight.shape == data2vec_layer.fc2.weight.shape ), f"Shape for bert_output.dense.weight should be {data2vec_layer.fc2.weight.shape}" bert_output.dense.weight = data2vec_layer.fc2.weight bert_output.dense.bias = data2vec_layer.fc2.bias bert_output.LayerNorm.weight = data2vec_layer.final_layer_norm.weight bert_output.LayerNorm.bias = data2vec_layer.final_layer_norm.bias # end of layer if classification_head: model.classifier.dense.weight = data2vec.model.classification_heads["mnli"].dense.weight model.classifier.dense.bias = data2vec.model.classification_heads["mnli"].dense.bias model.classifier.out_proj.weight = data2vec.model.classification_heads["mnli"].out_proj.weight model.classifier.out_proj.bias = data2vec.model.classification_heads["mnli"].out_proj.bias else: # LM Head model.lm_head.dense.weight = data2vec_model.encoder.lm_head.dense.weight model.lm_head.dense.bias = data2vec_model.encoder.lm_head.dense.bias model.lm_head.layer_norm.weight = data2vec_model.encoder.lm_head.layer_norm.weight model.lm_head.layer_norm.bias = data2vec_model.encoder.lm_head.layer_norm.bias model.lm_head.decoder.weight = data2vec_model.encoder.lm_head.weight model.lm_head.decoder.bias = data2vec_model.encoder.lm_head.bias # Let's check that we get the same results. 
input_ids: torch.Tensor = data2vec.encode(SAMPLE_TEXT).unsqueeze(0) # batch of size 1 our_output = model(input_ids)[0] if classification_head: their_output = data2vec.model.classification_heads["mnli"](data2vec.extract_features(input_ids)) else: their_output = data2vec_model(input_ids)[0] print(our_output.shape, their_output.shape) max_absolute_diff = torch.max(torch.abs(our_output - their_output)).item() print(f"max_absolute_diff = {max_absolute_diff}") # ~ 1e-7 success = torch.allclose(our_output, their_output, atol=1e-3) print("Do both models output the same tensors?", "🔥" if success else "💩") if not success: raise Exception("Something went wRoNg") pathlib.Path(pytorch_dump_folder_path).mkdir(parents=True, exist_ok=True) print(f"Saving model to {pytorch_dump_folder_path}") model.save_pretrained(pytorch_dump_folder_path) if __name__ == "__main__": parser = argparse.ArgumentParser() # Required parameters parser.add_argument( "--checkpoint_path", default=None, type=str, required=True, help="Path the official PyTorch dump." ) parser.add_argument( "--pytorch_dump_folder_path", default=None, type=str, required=True, help="Path to the output PyTorch model." ) parser.add_argument( "--classification_head", action="store_true", help="Whether to convert a final classification head." ) args = parser.parse_args() convert_data2vec_checkpoint_to_pytorch( args.checkpoint_path, args.pytorch_dump_folder_path, args.classification_head )
27182812/ChatGLM-LLaMA-chinese-insturct
52,884
src/transformers/models/data2vec/modeling_data2vec_vision.py
# coding=utf-8 # Copyright 2022 Meta Platforms and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch Data2VecVision model.""" import collections.abc import math from dataclasses import dataclass from typing import List, Optional, Tuple, Union import torch import torch.utils.checkpoint from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACT2FN from ...modeling_outputs import ( BaseModelOutput, BaseModelOutputWithPooling, ImageClassifierOutput, SemanticSegmenterOutput, ) from ...modeling_utils import PreTrainedModel from ...pytorch_utils import find_pruneable_heads_and_indices, meshgrid, prune_linear_layer from ...utils import ( add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_data2vec_vision import Data2VecVisionConfig logger = logging.get_logger(__name__) # General docstring _CONFIG_FOR_DOC = "Data2VecVisionConfig" # Base docstring _CHECKPOINT_FOR_DOC = "facebook/data2vec-vision-base" _EXPECTED_OUTPUT_SHAPE = [1, 197, 768] # Image classification docstring _IMAGE_CLASS_CHECKPOINT = "facebook/data2vec-vision-base-ft1k" _IMAGE_CLASS_EXPECTED_OUTPUT = "remote control, remote" DATA2VEC_VISION_PRETRAINED_MODEL_ARCHIVE_LIST = [ "facebook/data2vec-vision-base-ft1k", # See all Data2VecVision models at https://huggingface.co/models?filter=data2vec-vision ] @dataclass # Copied 
from transformers.models.beit.modeling_beit.BeitModelOutputWithPooling with Beit->Data2VecVision class Data2VecVisionModelOutputWithPooling(BaseModelOutputWithPooling): """ Class for outputs of [`Data2VecVisionModel`]. Args: last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`): Sequence of hidden-states at the output of the last layer of the model. pooler_output (`torch.FloatTensor` of shape `(batch_size, hidden_size)`): Average of the last layer hidden states of the patch tokens (excluding the *[CLS]* token) if *config.use_mean_pooling* is set to True. If set to False, then the final hidden state of the *[CLS]* token will be returned. hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ # Copied from transformers.models.beit.modeling_beit.drop_path def drop_path(input: torch.Tensor, drop_prob: float = 0.0, training: bool = False) -> torch.Tensor: """ Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). Comment by Ross Wightman: This is the same as the DropConnect impl I created for EfficientNet, etc networks, however, the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper... 
See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use 'survival rate' as the argument. """ if drop_prob == 0.0 or not training: return input keep_prob = 1 - drop_prob shape = (input.shape[0],) + (1,) * (input.ndim - 1) # work with diff dim tensors, not just 2D ConvNets random_tensor = keep_prob + torch.rand(shape, dtype=input.dtype, device=input.device) random_tensor.floor_() # binarize output = input.div(keep_prob) * random_tensor return output # Copied from transformers.models.beit.modeling_beit.BeitDropPath with Beit->Data2VecVision class Data2VecVisionDropPath(nn.Module): """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).""" def __init__(self, drop_prob: Optional[float] = None) -> None: super().__init__() self.drop_prob = drop_prob def forward(self, hidden_states: torch.Tensor) -> torch.Tensor: return drop_path(hidden_states, self.drop_prob, self.training) def extra_repr(self) -> str: return "p={}".format(self.drop_prob) # Copied from transformers.models.beit.modeling_beit.BeitEmbeddings with Beit->Data2VecVision class Data2VecVisionEmbeddings(nn.Module): """ Construct the CLS token, position and patch embeddings. Optionally, also the mask token. 
""" def __init__(self, config: Data2VecVisionConfig) -> None: super().__init__() self.cls_token = nn.Parameter(torch.zeros(1, 1, config.hidden_size)) if config.use_mask_token: self.mask_token = nn.Parameter(torch.zeros(1, 1, config.hidden_size)) else: self.mask_token = None self.patch_embeddings = Data2VecVisionPatchEmbeddings(config) num_patches = self.patch_embeddings.num_patches if config.use_absolute_position_embeddings: self.position_embeddings = nn.Parameter(torch.zeros(1, num_patches + 1, config.hidden_size)) else: self.position_embeddings = None self.dropout = nn.Dropout(config.hidden_dropout_prob) def forward(self, pixel_values: torch.Tensor, bool_masked_pos: Optional[torch.BoolTensor] = None) -> torch.Tensor: embeddings = self.patch_embeddings(pixel_values) batch_size, seq_len, _ = embeddings.size() cls_tokens = self.cls_token.expand(batch_size, -1, -1) if bool_masked_pos is not None: mask_tokens = self.mask_token.expand(batch_size, seq_len, -1) # replace the masked visual tokens by mask_tokens w = bool_masked_pos.unsqueeze(-1).type_as(mask_tokens) embeddings = embeddings * (1 - w) + mask_tokens * w embeddings = torch.cat((cls_tokens, embeddings), dim=1) if self.position_embeddings is not None: embeddings = embeddings + self.position_embeddings embeddings = self.dropout(embeddings) return embeddings # Copied from transformers.models.beit.modeling_beit.BeitPatchEmbeddings with Beit->Data2VecVision class Data2VecVisionPatchEmbeddings(nn.Module): """ This class turns `pixel_values` of shape `(batch_size, num_channels, height, width)` into the initial `hidden_states` (patch embeddings) of shape `(batch_size, seq_length, hidden_size)` to be consumed by a Transformer. 
""" def __init__(self, config): super().__init__() image_size, patch_size = config.image_size, config.patch_size num_channels, hidden_size = config.num_channels, config.hidden_size image_size = image_size if isinstance(image_size, collections.abc.Iterable) else (image_size, image_size) patch_size = patch_size if isinstance(patch_size, collections.abc.Iterable) else (patch_size, patch_size) num_patches = (image_size[1] // patch_size[1]) * (image_size[0] // patch_size[0]) patch_shape = (image_size[0] // patch_size[0], image_size[1] // patch_size[1]) self.image_size = image_size self.patch_size = patch_size self.num_channels = num_channels self.num_patches = num_patches self.patch_shape = patch_shape self.projection = nn.Conv2d(num_channels, hidden_size, kernel_size=patch_size, stride=patch_size) def forward(self, pixel_values: torch.Tensor) -> torch.Tensor: batch_size, num_channels, height, width = pixel_values.shape if num_channels != self.num_channels: raise ValueError( "Make sure that the channel dimension of the pixel values match with the one set in the configuration." ) if height != self.image_size[0] or width != self.image_size[1]: raise ValueError( f"Input image size ({height}*{width}) doesn't match model ({self.image_size[0]}*{self.image_size[1]})." ) embeddings = self.projection(pixel_values).flatten(2).transpose(1, 2) return embeddings # Copied from transformers.models.beit.modeling_beit.BeitSelfAttention with Beit->Data2VecVision class Data2VecVisionSelfAttention(nn.Module): def __init__(self, config: Data2VecVisionConfig, window_size: Optional[tuple] = None) -> None: super().__init__() if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"): raise ValueError( f"The hidden size {config.hidden_size,} is not a multiple of the number of attention " f"heads {config.num_attention_heads}." 
) self.num_attention_heads = config.num_attention_heads self.attention_head_size = int(config.hidden_size / config.num_attention_heads) self.all_head_size = self.num_attention_heads * self.attention_head_size self.query = nn.Linear(config.hidden_size, self.all_head_size) self.key = nn.Linear(config.hidden_size, self.all_head_size, bias=False) self.value = nn.Linear(config.hidden_size, self.all_head_size) self.dropout = nn.Dropout(config.attention_probs_dropout_prob) if window_size: self.relative_position_bias = Data2VecVisionRelativePositionBias(config, window_size=window_size) else: self.relative_position_bias = None def transpose_for_scores(self, x): new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) x = x.view(*new_x_shape) return x.permute(0, 2, 1, 3) def forward( self, hidden_states: torch.Tensor, head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, relative_position_bias: Optional["Data2VecVisionRelativePositionBias"] = None, ) -> Union[Tuple[torch.Tensor], Tuple[torch.Tensor, torch.Tensor]]: mixed_query_layer = self.query(hidden_states) key_layer = self.transpose_for_scores(self.key(hidden_states)) value_layer = self.transpose_for_scores(self.value(hidden_states)) query_layer = self.transpose_for_scores(mixed_query_layer) # Take the dot product between "query" and "key" to get the raw attention scores. attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2)) attention_scores = attention_scores / math.sqrt(self.attention_head_size) # Add relative position bias if present. if self.relative_position_bias is not None: attention_scores = attention_scores + self.relative_position_bias().unsqueeze(0) # Add shared relative position bias if provided. if relative_position_bias is not None: attention_scores = attention_scores + relative_position_bias # Normalize the attention scores to probabilities. 
attention_probs = nn.functional.softmax(attention_scores, dim=-1) # This is actually dropping out entire tokens to attend to, which might # seem a bit unusual, but is taken from the original Transformer paper. attention_probs = self.dropout(attention_probs) # Mask heads if we want to if head_mask is not None: attention_probs = attention_probs * head_mask context_layer = torch.matmul(attention_probs, value_layer) context_layer = context_layer.permute(0, 2, 1, 3).contiguous() new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,) context_layer = context_layer.view(*new_context_layer_shape) outputs = (context_layer, attention_probs) if output_attentions else (context_layer,) return outputs # Copied from transformers.models.beit.modeling_beit.BeitSelfOutput with Beit->Data2VecVision class Data2VecVisionSelfOutput(nn.Module): """ The residual connection is defined in Data2VecVisionLayer instead of here (as is the case with other models), due to the layernorm applied before each block. 
""" def __init__(self, config: Data2VecVisionConfig) -> None: super().__init__() self.dense = nn.Linear(config.hidden_size, config.hidden_size) self.dropout = nn.Dropout(config.hidden_dropout_prob) def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor, gamma=None) -> torch.Tensor: hidden_states = self.dense(hidden_states) hidden_states = self.dropout(hidden_states) return hidden_states # Copied from transformers.models.beit.modeling_beit.BeitAttention with Beit->Data2VecVision class Data2VecVisionAttention(nn.Module): def __init__(self, config: Data2VecVisionConfig, window_size: Optional[tuple] = None) -> None: super().__init__() self.attention = Data2VecVisionSelfAttention(config, window_size=window_size) self.output = Data2VecVisionSelfOutput(config) self.pruned_heads = set() def prune_heads(self, heads): if len(heads) == 0: return heads, index = find_pruneable_heads_and_indices( heads, self.attention.num_attention_heads, self.attention.attention_head_size, self.pruned_heads ) # Prune linear layers self.attention.query = prune_linear_layer(self.attention.query, index) self.attention.key = prune_linear_layer(self.attention.key, index) self.attention.value = prune_linear_layer(self.attention.value, index) self.output.dense = prune_linear_layer(self.output.dense, index, dim=1) # Update hyper params and store pruned heads self.attention.num_attention_heads = self.attention.num_attention_heads - len(heads) self.attention.all_head_size = self.attention.attention_head_size * self.attention.num_attention_heads self.pruned_heads = self.pruned_heads.union(heads) def forward( self, hidden_states: torch.Tensor, head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, relative_position_bias: Optional["Data2VecVisionRelativePositionBias"] = None, ) -> Union[Tuple[torch.Tensor], Tuple[torch.Tensor, torch.Tensor]]: self_outputs = self.attention(hidden_states, head_mask, output_attentions, relative_position_bias) attention_output = 
self.output(self_outputs[0], hidden_states) outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them return outputs # Copied from transformers.models.beit.modeling_beit.BeitIntermediate with Beit->Data2VecVision class Data2VecVisionIntermediate(nn.Module): def __init__(self, config: Data2VecVisionConfig) -> None: super().__init__() self.dense = nn.Linear(config.hidden_size, config.intermediate_size) if isinstance(config.hidden_act, str): self.intermediate_act_fn = ACT2FN[config.hidden_act] else: self.intermediate_act_fn = config.hidden_act def forward(self, hidden_states: torch.Tensor) -> torch.Tensor: hidden_states = self.dense(hidden_states) hidden_states = self.intermediate_act_fn(hidden_states) return hidden_states # Copied from transformers.models.beit.modeling_beit.BeitOutput with Beit->Data2VecVision class Data2VecVisionOutput(nn.Module): def __init__(self, config: Data2VecVisionConfig) -> None: super().__init__() self.dense = nn.Linear(config.intermediate_size, config.hidden_size) self.dropout = nn.Dropout(config.hidden_dropout_prob) def forward(self, hidden_states: torch.Tensor) -> torch.Tensor: hidden_states = self.dense(hidden_states) hidden_states = self.dropout(hidden_states) return hidden_states # Copied from transformers.models.beit.modeling_beit.BeitLayer with Beit->Data2VecVision,BEiT->Data2VecVision class Data2VecVisionLayer(nn.Module): """This corresponds to the Block class in the timm implementation.""" def __init__( self, config: Data2VecVisionConfig, window_size: Optional[tuple] = None, drop_path_rate: float = 0.0 ) -> None: super().__init__() self.chunk_size_feed_forward = config.chunk_size_feed_forward self.seq_len_dim = 1 self.attention = Data2VecVisionAttention(config, window_size=window_size) self.intermediate = Data2VecVisionIntermediate(config) self.output = Data2VecVisionOutput(config) self.layernorm_before = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) self.drop_path = 
Data2VecVisionDropPath(drop_path_rate) if drop_path_rate > 0.0 else nn.Identity() self.layernorm_after = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) init_values = config.layer_scale_init_value if init_values > 0: self.lambda_1 = nn.Parameter(init_values * torch.ones((config.hidden_size)), requires_grad=True) self.lambda_2 = nn.Parameter(init_values * torch.ones((config.hidden_size)), requires_grad=True) else: self.lambda_1, self.lambda_2 = None, None def forward( self, hidden_states: torch.Tensor, head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, relative_position_bias: Optional["Data2VecVisionRelativePositionBias"] = None, ) -> Union[Tuple[torch.Tensor], Tuple[torch.Tensor, torch.Tensor]]: self_attention_outputs = self.attention( self.layernorm_before(hidden_states), # in Data2VecVision, layernorm is applied before self-attention head_mask, output_attentions=output_attentions, relative_position_bias=relative_position_bias, ) attention_output = self_attention_outputs[0] outputs = self_attention_outputs[1:] # add self attentions if we output attention weights # apply lambda_1 if present if self.lambda_1 is not None: attention_output = self.lambda_1 * attention_output # first residual connection hidden_states = self.drop_path(attention_output) + hidden_states # in Data2VecVision, layernorm is also applied after self-attention layer_output = self.layernorm_after(hidden_states) layer_output = self.intermediate(layer_output) layer_output = self.output(layer_output) if self.lambda_2 is not None: layer_output = self.lambda_2 * layer_output # second residual connection layer_output = self.drop_path(layer_output) + hidden_states outputs = (layer_output,) + outputs return outputs # Copied from transformers.models.beit.modeling_beit.BeitRelativePositionBias with Beit->Data2VecVision class Data2VecVisionRelativePositionBias(nn.Module): def __init__(self, config: Data2VecVisionConfig, window_size: tuple) -> None: super().__init__() 
self.window_size = window_size self.num_relative_distance = (2 * window_size[0] - 1) * (2 * window_size[1] - 1) + 3 self.relative_position_bias_table = nn.Parameter( torch.zeros(self.num_relative_distance, config.num_attention_heads) ) # 2*Wh-1 * 2*Ww-1, nH # cls to token & token 2 cls & cls to cls # get pair-wise relative position index for each token inside the window coords_h = torch.arange(window_size[0]) coords_w = torch.arange(window_size[1]) coords = torch.stack(meshgrid([coords_h, coords_w], indexing="ij")) # 2, Wh, Ww coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :] # 2, Wh*Ww, Wh*Ww relative_coords = relative_coords.permute(1, 2, 0).contiguous() # Wh*Ww, Wh*Ww, 2 relative_coords[:, :, 0] += window_size[0] - 1 # shift to start from 0 relative_coords[:, :, 1] += window_size[1] - 1 relative_coords[:, :, 0] *= 2 * window_size[1] - 1 relative_position_index = torch.zeros( size=(window_size[0] * window_size[1] + 1,) * 2, dtype=relative_coords.dtype ) relative_position_index[1:, 1:] = relative_coords.sum(-1) # Wh*Ww, Wh*Ww relative_position_index[0, 0:] = self.num_relative_distance - 3 relative_position_index[0:, 0] = self.num_relative_distance - 2 relative_position_index[0, 0] = self.num_relative_distance - 1 self.register_buffer("relative_position_index", relative_position_index) def forward(self) -> torch.Tensor: relative_position_bias = self.relative_position_bias_table[self.relative_position_index.view(-1)].view( self.window_size[0] * self.window_size[1] + 1, self.window_size[0] * self.window_size[1] + 1, -1 ) # Wh*Ww,Wh*Ww,nH return relative_position_bias.permute(2, 0, 1).contiguous() # nH, Wh*Ww, Wh*Ww # Copied from transformers.models.beit.modeling_beit.BeitEncoder with Beit->Data2VecVision class Data2VecVisionEncoder(nn.Module): def __init__(self, config: Data2VecVisionConfig, window_size: Optional[tuple] = None) -> None: super().__init__() self.config = config if 
config.use_shared_relative_position_bias: self.relative_position_bias = Data2VecVisionRelativePositionBias(config, window_size=window_size) else: self.relative_position_bias = None # stochastic depth decay rule dpr = [x.item() for x in torch.linspace(0, config.drop_path_rate, config.num_hidden_layers)] self.layer = nn.ModuleList( [ Data2VecVisionLayer( config, window_size=window_size if config.use_relative_position_bias else None, drop_path_rate=dpr[i], ) for i in range(config.num_hidden_layers) ] ) self.gradient_checkpointing = False def forward( self, hidden_states: torch.Tensor, head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, output_hidden_states: bool = False, return_dict: bool = True, ) -> Union[tuple, BaseModelOutput]: all_hidden_states = () if output_hidden_states else None all_self_attentions = () if output_attentions else None for i, layer_module in enumerate(self.layer): if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) layer_head_mask = head_mask[i] if head_mask is not None else None if self.gradient_checkpointing and self.training: def create_custom_forward(module): def custom_forward(*inputs): return module(*inputs, output_attentions) return custom_forward layer_outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(layer_module), hidden_states, layer_head_mask, ) else: relative_position_bias = ( self.relative_position_bias() if self.relative_position_bias is not None else None ) layer_outputs = layer_module(hidden_states, layer_head_mask, output_attentions, relative_position_bias) hidden_states = layer_outputs[0] if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not return_dict: return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if v is not None) return BaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, 
attentions=all_self_attentions, ) # Copied from transformers.models.beit.modeling_beit.BeitPreTrainedModel with Beit->Data2VecVision,beit->data2vec_vision class Data2VecVisionPreTrainedModel(PreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. """ config_class = Data2VecVisionConfig base_model_prefix = "data2vec_vision" main_input_name = "pixel_values" supports_gradient_checkpointing = True def _init_weights(self, module): """Initialize the weights""" if isinstance(module, (nn.Linear, nn.Conv2d, nn.ConvTranspose2d)): # Slightly different from the TF version which uses truncated_normal for initialization # cf https://github.com/pytorch/pytorch/pull/5617 module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.bias is not None: module.bias.data.zero_() elif isinstance(module, nn.Embedding): module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.padding_idx is not None: module.weight.data[module.padding_idx].zero_() elif isinstance(module, nn.LayerNorm): module.bias.data.zero_() module.weight.data.fill_(1.0) def _set_gradient_checkpointing(self, module, value=False): if isinstance(module, Data2VecVisionEncoder): module.gradient_checkpointing = value DATA2VEC_VISION_START_DOCSTRING = r""" This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`Data2VecVisionConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. 
""" DATA2VEC_VISION_INPUTS_DOCSTRING = r""" Args: pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`): Pixel values. Pixel values can be obtained using [`AutoImageProcessor`]. See [`BeitImageProcessor.__call__`] for details. head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ @add_start_docstrings( "The bare Data2VecVision Model transformer outputting raw hidden-states without any specific head on top.", DATA2VEC_VISION_START_DOCSTRING, ) # Copied from transformers.models.beit.modeling_beit.BeitModel with BEIT->DATA2VEC_VISION,Beit->Data2VecVision,True->False class Data2VecVisionModel(Data2VecVisionPreTrainedModel): def __init__(self, config: Data2VecVisionConfig, add_pooling_layer: bool = False) -> None: super().__init__(config) self.config = config self.embeddings = Data2VecVisionEmbeddings(config) self.encoder = Data2VecVisionEncoder(config, window_size=self.embeddings.patch_embeddings.patch_shape) self.layernorm = ( nn.Identity() if config.use_mean_pooling else nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) ) self.pooler = Data2VecVisionPooler(config) if add_pooling_layer else None # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.embeddings.patch_embeddings def _prune_heads(self, heads_to_prune): """ 
Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base class PreTrainedModel """ for layer, heads in heads_to_prune.items(): self.encoder.layer[layer].attention.prune_heads(heads) @add_start_docstrings_to_model_forward(DATA2VEC_VISION_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=Data2VecVisionModelOutputWithPooling, config_class=_CONFIG_FOR_DOC, modality="vision", expected_output=_EXPECTED_OUTPUT_SHAPE, ) def forward( self, pixel_values: Optional[torch.Tensor] = None, bool_masked_pos: Optional[torch.BoolTensor] = None, head_mask: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[tuple, Data2VecVisionModelOutputWithPooling]: output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict if pixel_values is None: raise ValueError("You have to specify pixel_values") # Prepare head mask if needed # 1.0 in head_mask indicate we keep the head # attention_probs has shape bsz x n_heads x N x N # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) embedding_output = self.embeddings(pixel_values, bool_masked_pos) encoder_outputs = self.encoder( embedding_output, head_mask=head_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = encoder_outputs[0] sequence_output = self.layernorm(sequence_output) pooled_output = self.pooler(sequence_output) if 
self.pooler is not None else None if not return_dict: head_outputs = (sequence_output, pooled_output) if pooled_output is not None else (sequence_output,) return head_outputs + encoder_outputs[1:] return Data2VecVisionModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) # Copied from transformers.models.beit.modeling_beit.BeitPooler with Beit->Data2VecVision class Data2VecVisionPooler(nn.Module): def __init__(self, config: Data2VecVisionConfig) -> None: super().__init__() self.layernorm = ( nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) if config.use_mean_pooling else None ) def forward(self, hidden_states: torch.Tensor) -> torch.Tensor: if self.layernorm is not None: # Mean pool the final hidden states of the patch tokens patch_tokens = hidden_states[:, 1:, :] pooled_output = self.layernorm(patch_tokens.mean(1)) else: # Pool by simply taking the final hidden state of the [CLS] token pooled_output = hidden_states[:, 0] return pooled_output @add_start_docstrings( """ Data2VecVision Model transformer with an image classification head on top (a linear layer on top of the average of the final hidden states of the patch tokens) e.g. for ImageNet. 
""", DATA2VEC_VISION_START_DOCSTRING, ) # Copied from transformers.models.beit.modeling_beit.BeitForImageClassification with BEIT->DATA2VEC_VISION,Beit->Data2VecVision,beit->data2vec_vision class Data2VecVisionForImageClassification(Data2VecVisionPreTrainedModel): def __init__(self, config: Data2VecVisionConfig) -> None: super().__init__(config) self.num_labels = config.num_labels self.data2vec_vision = Data2VecVisionModel(config, add_pooling_layer=True) # Classifier head self.classifier = nn.Linear(config.hidden_size, config.num_labels) if config.num_labels > 0 else nn.Identity() # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(DATA2VEC_VISION_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_IMAGE_CLASS_CHECKPOINT, output_type=ImageClassifierOutput, config_class=_CONFIG_FOR_DOC, expected_output=_IMAGE_CLASS_EXPECTED_OUTPUT, ) def forward( self, pixel_values: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, labels: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[tuple, ImageClassifierOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the image classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.data2vec_vision( pixel_values, head_mask=head_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) pooled_output = outputs.pooler_output if return_dict else outputs[1] logits = self.classifier(pooled_output) loss = None if labels is not None: if self.config.problem_type is None: if self.num_labels == 1: self.config.problem_type = "regression" elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int): self.config.problem_type = "single_label_classification" else: self.config.problem_type = "multi_label_classification" if self.config.problem_type == "regression": loss_fct = MSELoss() if self.num_labels == 1: loss = loss_fct(logits.squeeze(), labels.squeeze()) else: loss = loss_fct(logits, labels) elif self.config.problem_type == "single_label_classification": loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) elif self.config.problem_type == "multi_label_classification": loss_fct = BCEWithLogitsLoss() loss = loss_fct(logits, labels) if not return_dict: output = (logits,) + outputs[2:] return ((loss,) + output) if loss is not None else output return ImageClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) # Copied from transformers.models.beit.modeling_beit.BeitConvModule with Beit->Data2VecVision class Data2VecVisionConvModule(nn.Module): """ A convolutional block that bundles conv/norm/activation layers. This block simplifies the usage of convolution layers, which are commonly used with a norm layer (e.g., BatchNorm) and activation layer (e.g., ReLU). Based on OpenMMLab's implementation, found in https://github.com/open-mmlab/mmsegmentation. 
""" def __init__( self, in_channels: int, out_channels: int, kernel_size: Union[int, Tuple[int, int]], padding: Union[int, Tuple[int, int], str] = 0, bias: bool = False, dilation: Union[int, Tuple[int, int]] = 1, ) -> None: super().__init__() self.conv = nn.Conv2d( in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, padding=padding, bias=bias, dilation=dilation, ) self.bn = nn.BatchNorm2d(out_channels) self.activation = nn.ReLU() def forward(self, input: torch.Tensor) -> torch.Tensor: output = self.conv(input) output = self.bn(output) output = self.activation(output) return output # Copied from transformers.models.beit.modeling_beit.BeitPyramidPoolingBlock with Beit->Data2VecVision class Data2VecVisionPyramidPoolingBlock(nn.Module): def __init__(self, pool_scale: int, in_channels: int, channels: int) -> None: super().__init__() self.layers = [ nn.AdaptiveAvgPool2d(pool_scale), Data2VecVisionConvModule(in_channels, channels, kernel_size=1), ] for i, layer in enumerate(self.layers): self.add_module(str(i), layer) def forward(self, input: torch.Tensor) -> torch.Tensor: hidden_state = input for layer in self.layers: hidden_state = layer(hidden_state) return hidden_state # Copied from transformers.models.beit.modeling_beit.BeitPyramidPoolingModule with Beit->Data2VecVision class Data2VecVisionPyramidPoolingModule(nn.Module): """ Pyramid Pooling Module (PPM) used in PSPNet. Args: pool_scales (tuple[int]): Pooling scales used in Pooling Pyramid Module. in_channels (int): Input channels. channels (int): Channels after modules, before conv_seg. align_corners (bool): align_corners argument of F.interpolate. Based on OpenMMLab's implementation, found in https://github.com/open-mmlab/mmsegmentation. 
""" def __init__(self, pool_scales: Tuple[int, ...], in_channels: int, channels: int, align_corners: bool) -> None: super().__init__() self.pool_scales = pool_scales self.align_corners = align_corners self.in_channels = in_channels self.channels = channels self.blocks = [] for i, pool_scale in enumerate(pool_scales): block = Data2VecVisionPyramidPoolingBlock( pool_scale=pool_scale, in_channels=in_channels, channels=channels ) self.blocks.append(block) self.add_module(str(i), block) def forward(self, x: torch.Tensor) -> List[torch.Tensor]: ppm_outs = [] for ppm in self.blocks: ppm_out = ppm(x) upsampled_ppm_out = nn.functional.interpolate( ppm_out, size=x.size()[2:], mode="bilinear", align_corners=self.align_corners ) ppm_outs.append(upsampled_ppm_out) return ppm_outs # Copied from transformers.models.beit.modeling_beit.BeitUperHead with Beit->Data2VecVision class Data2VecVisionUperHead(nn.Module): """ Unified Perceptual Parsing for Scene Understanding. This head is the implementation of [UPerNet](https://arxiv.org/abs/1807.10221). Based on OpenMMLab's implementation, found in https://github.com/open-mmlab/mmsegmentation. """ def __init__(self, config: Data2VecVisionConfig) -> None: super().__init__() self.pool_scales = config.pool_scales # e.g. (1, 2, 3, 6) self.in_channels = [config.hidden_size] * 4 # e.g. 
[768, 768, 768, 768] self.channels = config.hidden_size self.align_corners = False self.classifier = nn.Conv2d(self.channels, config.num_labels, kernel_size=1) # PSP Module self.psp_modules = Data2VecVisionPyramidPoolingModule( self.pool_scales, self.in_channels[-1], self.channels, align_corners=self.align_corners, ) self.bottleneck = Data2VecVisionConvModule( self.in_channels[-1] + len(self.pool_scales) * self.channels, self.channels, kernel_size=3, padding=1, ) # FPN Module self.lateral_convs = nn.ModuleList() self.fpn_convs = nn.ModuleList() for in_channels in self.in_channels[:-1]: # skip the top layer l_conv = Data2VecVisionConvModule(in_channels, self.channels, kernel_size=1) fpn_conv = Data2VecVisionConvModule(self.channels, self.channels, kernel_size=3, padding=1) self.lateral_convs.append(l_conv) self.fpn_convs.append(fpn_conv) self.fpn_bottleneck = Data2VecVisionConvModule( len(self.in_channels) * self.channels, self.channels, kernel_size=3, padding=1, ) def psp_forward(self, inputs): x = inputs[-1] psp_outs = [x] psp_outs.extend(self.psp_modules(x)) psp_outs = torch.cat(psp_outs, dim=1) output = self.bottleneck(psp_outs) return output def forward(self, encoder_hidden_states: torch.Tensor) -> torch.Tensor: # build laterals laterals = [lateral_conv(encoder_hidden_states[i]) for i, lateral_conv in enumerate(self.lateral_convs)] laterals.append(self.psp_forward(encoder_hidden_states)) # build top-down path used_backbone_levels = len(laterals) for i in range(used_backbone_levels - 1, 0, -1): prev_shape = laterals[i - 1].shape[2:] laterals[i - 1] = laterals[i - 1] + nn.functional.interpolate( laterals[i], size=prev_shape, mode="bilinear", align_corners=self.align_corners ) # build outputs fpn_outs = [self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels - 1)] # append psp feature fpn_outs.append(laterals[-1]) for i in range(used_backbone_levels - 1, 0, -1): fpn_outs[i] = nn.functional.interpolate( fpn_outs[i], size=fpn_outs[0].shape[2:], 
mode="bilinear", align_corners=self.align_corners ) fpn_outs = torch.cat(fpn_outs, dim=1) output = self.fpn_bottleneck(fpn_outs) output = self.classifier(output) return output # Copied from transformers.models.beit.modeling_beit.BeitFCNHead with Beit->Data2VecVision class Data2VecVisionFCNHead(nn.Module): """ Fully Convolution Networks for Semantic Segmentation. This head is implemented of [FCNNet](https://arxiv.org/abs/1411.4038>). Args: config (Data2VecVisionConfig): Configuration. in_channels kernel_size (int): The kernel size for convs in the head. Default: 3. dilation (int): The dilation rate for convs in the head. Default: 1. Based on OpenMMLab's implementation, found in https://github.com/open-mmlab/mmsegmentation. """ def __init__( self, config: Data2VecVisionConfig, in_index: int = 2, kernel_size: int = 3, dilation: Union[int, Tuple[int, int]] = 1, ) -> None: super().__init__() self.in_channels = config.hidden_size self.channels = config.auxiliary_channels self.num_convs = config.auxiliary_num_convs self.concat_input = config.auxiliary_concat_input self.in_index = in_index conv_padding = (kernel_size // 2) * dilation convs = [] convs.append( Data2VecVisionConvModule( self.in_channels, self.channels, kernel_size=kernel_size, padding=conv_padding, dilation=dilation ) ) for i in range(self.num_convs - 1): convs.append( Data2VecVisionConvModule( self.channels, self.channels, kernel_size=kernel_size, padding=conv_padding, dilation=dilation ) ) if self.num_convs == 0: self.convs = nn.Identity() else: self.convs = nn.Sequential(*convs) if self.concat_input: self.conv_cat = Data2VecVisionConvModule( self.in_channels + self.channels, self.channels, kernel_size=kernel_size, padding=kernel_size // 2 ) self.classifier = nn.Conv2d(self.channels, config.num_labels, kernel_size=1) def forward(self, encoder_hidden_states: torch.Tensor) -> torch.Tensor: # just take the relevant feature maps hidden_states = encoder_hidden_states[self.in_index] output = 
self.convs(hidden_states) if self.concat_input: output = self.conv_cat(torch.cat([hidden_states, output], dim=1)) output = self.classifier(output) return output @add_start_docstrings( """ Data2VecVision Model transformer with a semantic segmentation head on top e.g. for ADE20k, CityScapes. """, DATA2VEC_VISION_START_DOCSTRING, ) # Copied from transformers.models.beit.modeling_beit.BeitForSemanticSegmentation with BEIT->DATA2VEC_VISION,Beit->Data2VecVision,microsoft/beit-base-finetuned-ade-640-640->facebook/data2vec-vision-base,beit->data2vec_vision class Data2VecVisionForSemanticSegmentation(Data2VecVisionPreTrainedModel): def __init__(self, config: Data2VecVisionConfig) -> None: super().__init__(config) self.num_labels = config.num_labels self.data2vec_vision = Data2VecVisionModel(config, add_pooling_layer=False) # FPNs self.fpn1 = nn.Sequential( nn.ConvTranspose2d(config.hidden_size, config.hidden_size, kernel_size=2, stride=2), nn.BatchNorm2d(config.hidden_size), nn.GELU(), nn.ConvTranspose2d(config.hidden_size, config.hidden_size, kernel_size=2, stride=2), ) self.fpn2 = nn.Sequential( nn.ConvTranspose2d(config.hidden_size, config.hidden_size, kernel_size=2, stride=2), ) self.fpn3 = nn.Identity() self.fpn4 = nn.MaxPool2d(kernel_size=2, stride=2) # Semantic segmentation head(s) self.decode_head = Data2VecVisionUperHead(config) self.auxiliary_head = Data2VecVisionFCNHead(config) if config.use_auxiliary_head else None # Initialize weights and apply final processing self.post_init() def compute_loss(self, logits, auxiliary_logits, labels): # upsample logits to the images' original size upsampled_logits = nn.functional.interpolate( logits, size=labels.shape[-2:], mode="bilinear", align_corners=False ) if auxiliary_logits is not None: upsampled_auxiliary_logits = nn.functional.interpolate( auxiliary_logits, size=labels.shape[-2:], mode="bilinear", align_corners=False ) # compute weighted loss loss_fct = 
CrossEntropyLoss(ignore_index=self.config.semantic_loss_ignore_index) main_loss = loss_fct(upsampled_logits, labels) auxiliary_loss = loss_fct(upsampled_auxiliary_logits, labels) loss = main_loss + self.config.auxiliary_loss_weight * auxiliary_loss return loss @add_start_docstrings_to_model_forward(DATA2VEC_VISION_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=SemanticSegmenterOutput, config_class=_CONFIG_FOR_DOC) def forward( self, pixel_values: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, labels: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[tuple, SemanticSegmenterOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size, height, width)`, *optional*): Ground truth semantic segmentation maps for computing the loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels > 1`, a classification loss is computed (Cross-Entropy). 
Returns: Examples: ```python >>> from transformers import AutoImageProcessor, Data2VecVisionForSemanticSegmentation >>> from PIL import Image >>> import requests >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg" >>> image = Image.open(requests.get(url, stream=True).raw) >>> image_processor = AutoImageProcessor.from_pretrained("facebook/data2vec-vision-base") >>> model = Data2VecVisionForSemanticSegmentation.from_pretrained("facebook/data2vec-vision-base") >>> inputs = image_processor(images=image, return_tensors="pt") >>> outputs = model(**inputs) >>> # logits are of shape (batch_size, num_labels, height, width) >>> logits = outputs.logits ```""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) outputs = self.data2vec_vision( pixel_values, head_mask=head_mask, output_attentions=output_attentions, output_hidden_states=True, # we need the intermediate hidden states return_dict=return_dict, ) encoder_hidden_states = outputs.hidden_states if return_dict else outputs[1] # only keep certain features, and reshape # note that we do +1 as the encoder_hidden_states also includes the initial embeddings features = [feature for idx, feature in enumerate(encoder_hidden_states) if idx + 1 in self.config.out_indices] batch_size = pixel_values.shape[0] patch_resolution = self.config.image_size // self.config.patch_size features = [ x[:, 1:, :].permute(0, 2, 1).reshape(batch_size, -1, patch_resolution, patch_resolution) for x in features ] # apply FPNs ops = [self.fpn1, self.fpn2, self.fpn3, self.fpn4] for i in range(len(features)): features[i] = ops[i](features[i]) logits = self.decode_head(features) auxiliary_logits = None if self.auxiliary_head is not None: auxiliary_logits = self.auxiliary_head(features) loss = None if labels is not None: if self.config.num_labels == 1: raise ValueError("The number of 
labels should be greater than one") else: loss = self.compute_loss(logits, auxiliary_logits, labels) if not return_dict: if output_hidden_states: output = (logits,) + outputs[1:] else: output = (logits,) + outputs[2:] return ((loss,) + output) if loss is not None else output return SemanticSegmenterOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states if output_hidden_states else None, attentions=outputs.attentions, )
27182812/ChatGLM-LLaMA-chinese-insturct
1,812
src/transformers/models/dinat/__init__.py
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _import_structure = {"configuration_dinat": ["DINAT_PRETRAINED_CONFIG_ARCHIVE_MAP", "DinatConfig"]} try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_dinat"] = [ "DINAT_PRETRAINED_MODEL_ARCHIVE_LIST", "DinatForImageClassification", "DinatModel", "DinatPreTrainedModel", "DinatBackbone", ] if TYPE_CHECKING: from .configuration_dinat import DINAT_PRETRAINED_CONFIG_ARCHIVE_MAP, DinatConfig try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_dinat import ( DINAT_PRETRAINED_MODEL_ARCHIVE_LIST, DinatBackbone, DinatForImageClassification, DinatModel, DinatPreTrainedModel, ) else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
42,171
src/transformers/models/dinat/modeling_dinat.py
# coding=utf-8 # Copyright 2022 SHI Labs and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch Dilated Neighborhood Attention Transformer model.""" import math from dataclasses import dataclass from typing import Optional, Tuple, Union import torch import torch.utils.checkpoint from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACT2FN from ...modeling_outputs import BackboneOutput from ...modeling_utils import BackboneMixin, PreTrainedModel from ...pytorch_utils import find_pruneable_heads_and_indices, prune_linear_layer from ...utils import ( ModelOutput, OptionalDependencyNotAvailable, add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, is_natten_available, logging, replace_return_docstrings, requires_backends, ) from .configuration_dinat import DinatConfig if is_natten_available(): from natten.functional import natten2dav, natten2dqkrpb else: def natten2dqkrpb(*args, **kwargs): raise OptionalDependencyNotAvailable() def natten2dav(*args, **kwargs): raise OptionalDependencyNotAvailable() logger = logging.get_logger(__name__) # General docstring _CONFIG_FOR_DOC = "DinatConfig" # Base docstring _CHECKPOINT_FOR_DOC = "shi-labs/dinat-mini-in1k-224" _EXPECTED_OUTPUT_SHAPE = [1, 7, 7, 512] # Image classification docstring _IMAGE_CLASS_CHECKPOINT = "shi-labs/dinat-mini-in1k-224" _IMAGE_CLASS_EXPECTED_OUTPUT = "tabby, tabby cat" 
DINAT_PRETRAINED_MODEL_ARCHIVE_LIST = [ "shi-labs/dinat-mini-in1k-224", # See all Dinat models at https://huggingface.co/models?filter=dinat ] # drop_path and DinatDropPath are from the timm library. @dataclass # Copied from transformers.models.nat.modeling_nat.NatEncoderOutput with Nat->Dinat class DinatEncoderOutput(ModelOutput): """ Dinat encoder's outputs, with potential hidden states and attentions. Args: last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`): Sequence of hidden-states at the output of the last layer of the model. hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each stage) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each stage) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. reshaped_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each stage) of shape `(batch_size, hidden_size, height, width)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs reshaped to include the spatial dimensions. 
""" last_hidden_state: torch.FloatTensor = None hidden_states: Optional[Tuple[torch.FloatTensor]] = None attentions: Optional[Tuple[torch.FloatTensor]] = None reshaped_hidden_states: Optional[Tuple[torch.FloatTensor]] = None @dataclass # Copied from transformers.models.nat.modeling_nat.NatModelOutput with Nat->Dinat class DinatModelOutput(ModelOutput): """ Dinat model's outputs that also contains a pooling of the last hidden states. Args: last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`): Sequence of hidden-states at the output of the last layer of the model. pooler_output (`torch.FloatTensor` of shape `(batch_size, hidden_size)`, *optional*, returned when `add_pooling_layer=True` is passed): Average pooling of the last layer hidden-state. hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each stage) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each stage) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. reshaped_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each stage) of shape `(batch_size, hidden_size, height, width)`. 
Hidden-states of the model at the output of each layer plus the initial embedding outputs reshaped to include the spatial dimensions. """ last_hidden_state: torch.FloatTensor = None pooler_output: Optional[torch.FloatTensor] = None hidden_states: Optional[Tuple[torch.FloatTensor]] = None attentions: Optional[Tuple[torch.FloatTensor]] = None reshaped_hidden_states: Optional[Tuple[torch.FloatTensor]] = None @dataclass # Copied from transformers.models.nat.modeling_nat.NatImageClassifierOutput with Nat->Dinat class DinatImageClassifierOutput(ModelOutput): """ Dinat outputs for image classification. Args: loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` is provided): Classification (or regression if config.num_labels==1) loss. logits (`torch.FloatTensor` of shape `(batch_size, config.num_labels)`): Classification (or regression if config.num_labels==1) scores (before SoftMax). hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each stage) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each stage) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. 
    reshaped_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is
    passed or when `config.output_hidden_states=True`):
        Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each stage) of
        shape `(batch_size, hidden_size, height, width)`.

        Hidden-states of the model at the output of each layer plus the initial embedding outputs reshaped to
        include the spatial dimensions.
    """

    loss: Optional[torch.FloatTensor] = None
    logits: torch.FloatTensor = None
    hidden_states: Optional[Tuple[torch.FloatTensor]] = None
    attentions: Optional[Tuple[torch.FloatTensor]] = None
    reshaped_hidden_states: Optional[Tuple[torch.FloatTensor]] = None


# Copied from transformers.models.nat.modeling_nat.NatEmbeddings with Nat->Dinat
class DinatEmbeddings(nn.Module):
    """
    Construct the patch and position embeddings.
    """

    def __init__(self, config):
        super().__init__()

        self.patch_embeddings = DinatPatchEmbeddings(config)

        self.norm = nn.LayerNorm(config.embed_dim)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, pixel_values: Optional[torch.FloatTensor]) -> Tuple[torch.Tensor]:
        embeddings = self.patch_embeddings(pixel_values)
        embeddings = self.norm(embeddings)
        embeddings = self.dropout(embeddings)

        return embeddings


# Copied from transformers.models.nat.modeling_nat.NatPatchEmbeddings with Nat->Dinat
class DinatPatchEmbeddings(nn.Module):
    """
    This class turns `pixel_values` of shape `(batch_size, num_channels, height, width)` into the initial
    `hidden_states` (patch embeddings) of shape `(batch_size, height, width, hidden_size)` to be consumed by a
    Transformer.
    """

    def __init__(self, config):
        super().__init__()
        patch_size = config.patch_size
        num_channels, hidden_size = config.num_channels, config.embed_dim
        self.num_channels = num_channels

        if patch_size == 4:
            pass
        else:
            # TODO: Support arbitrary patch sizes.
            raise ValueError("Dinat only supports patch size of 4 at the moment.")

        # Two stride-2 convolutions give an overall 4x spatial downsampling (the patch size).
        self.projection = nn.Sequential(
            nn.Conv2d(self.num_channels, hidden_size // 2, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)),
            nn.Conv2d(hidden_size // 2, hidden_size, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)),
        )

    def forward(self, pixel_values: Optional[torch.FloatTensor]) -> torch.Tensor:
        _, num_channels, height, width = pixel_values.shape
        if num_channels != self.num_channels:
            raise ValueError(
                "Make sure that the channel dimension of the pixel values match with the one set in the configuration."
            )
        embeddings = self.projection(pixel_values)
        # (batch, channels, height, width) -> (batch, height, width, channels) for the attention layers
        embeddings = embeddings.permute(0, 2, 3, 1)

        return embeddings


# Copied from transformers.models.nat.modeling_nat.NatDownsampler with Nat->Dinat
class DinatDownsampler(nn.Module):
    """
    Convolutional Downsampling Layer.

    Args:
        dim (`int`):
            Number of input channels.
        norm_layer (`nn.Module`, *optional*, defaults to `nn.LayerNorm`):
            Normalization layer class.
    """

    def __init__(self, dim: int, norm_layer: nn.Module = nn.LayerNorm) -> None:
        super().__init__()
        self.dim = dim
        self.reduction = nn.Conv2d(dim, 2 * dim, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
        self.norm = norm_layer(2 * dim)

    def forward(self, input_feature: torch.Tensor) -> torch.Tensor:
        # Conv2d expects channels-first, the rest of the model is channels-last, hence the permutes.
        input_feature = self.reduction(input_feature.permute(0, 3, 1, 2)).permute(0, 2, 3, 1)
        input_feature = self.norm(input_feature)
        return input_feature


# Copied from transformers.models.beit.modeling_beit.drop_path
def drop_path(input, drop_prob=0.0, training=False, scale_by_keep=True):
    """
    Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).

    Comment by Ross Wightman: This is the same as the DropConnect impl I created for EfficientNet, etc networks,
    however, the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
    See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for changing the
    layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use 'survival rate' as the
    argument.
    """
    if drop_prob == 0.0 or not training:
        return input
    keep_prob = 1 - drop_prob
    shape = (input.shape[0],) + (1,) * (input.ndim - 1)  # work with diff dim tensors, not just 2D ConvNets
    random_tensor = keep_prob + torch.rand(shape, dtype=input.dtype, device=input.device)
    random_tensor.floor_()  # binarize
    output = input.div(keep_prob) * random_tensor
    return output


# Copied from transformers.models.beit.modeling_beit.BeitDropPath with Beit->Dinat
class DinatDropPath(nn.Module):
    """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks)."""

    def __init__(self, drop_prob: Optional[float] = None) -> None:
        super().__init__()
        self.drop_prob = drop_prob

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        return drop_path(hidden_states, self.drop_prob, self.training)

    def extra_repr(self) -> str:
        return "p={}".format(self.drop_prob)


class NeighborhoodAttention(nn.Module):
    # Dilated neighborhood attention: each query attends to a (kernel_size x kernel_size)
    # neighborhood of keys, sampled with the given dilation, via the NATTEN kernels.
    def __init__(self, config, dim, num_heads, kernel_size, dilation):
        super().__init__()
        if dim % num_heads != 0:
            raise ValueError(
                f"The hidden size ({dim}) is not a multiple of the number of attention heads ({num_heads})"
            )

        self.num_attention_heads = num_heads
        self.attention_head_size = int(dim / num_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size
        self.kernel_size = kernel_size
        self.dilation = dilation

        # rpb is learnable relative positional biases; same concept is used Swin.
        self.rpb = nn.Parameter(torch.zeros(num_heads, (2 * self.kernel_size - 1), (2 * self.kernel_size - 1)))

        self.query = nn.Linear(self.all_head_size, self.all_head_size, bias=config.qkv_bias)
        self.key = nn.Linear(self.all_head_size, self.all_head_size, bias=config.qkv_bias)
        self.value = nn.Linear(self.all_head_size, self.all_head_size, bias=config.qkv_bias)

        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)

    # Copied from transformers.models.nat.modeling_nat.NeighborhoodAttention.transpose_for_scores with Nat->Dinat
    def transpose_for_scores(self, x):
        # (batch, height, width, all_head_size) -> (batch, num_heads, height, width, head_size)
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(new_x_shape)
        return x.permute(0, 3, 1, 2, 4)

    def forward(
        self,
        hidden_states: torch.Tensor,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor]:
        query_layer = self.transpose_for_scores(self.query(hidden_states))
        key_layer = self.transpose_for_scores(self.key(hidden_states))
        value_layer = self.transpose_for_scores(self.value(hidden_states))

        # Apply the scale factor before computing attention weights. It's usually more efficient because
        # attention weights are typically a bigger tensor compared to query.
        # It gives identical results because scalars are commutable in matrix multiplication.
        query_layer = query_layer / math.sqrt(self.attention_head_size)

        # Compute NA between "query" and "key" to get the raw attention scores, and add relative positional biases.
        attention_scores = natten2dqkrpb(query_layer, key_layer, self.rpb, self.dilation)

        # Normalize the attention scores to probabilities.
        attention_probs = nn.functional.softmax(attention_scores, dim=-1)

        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs)

        context_layer = natten2dav(attention_probs, value_layer, self.dilation)
        context_layer = context_layer.permute(0, 2, 3, 1, 4).contiguous()
        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        context_layer = context_layer.view(new_context_layer_shape)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)

        return outputs


# Copied from transformers.models.nat.modeling_nat.NeighborhoodAttentionOutput
class NeighborhoodAttentionOutput(nn.Module):
    # Output projection applied to the attention context before the residual connection.
    def __init__(self, config, dim):
        super().__init__()
        self.dense = nn.Linear(dim, dim)
        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)

        return hidden_states


class NeighborhoodAttentionModule(nn.Module):
    # Bundles the self-attention with its output projection and supports head pruning.
    def __init__(self, config, dim, num_heads, kernel_size, dilation):
        super().__init__()
        self.self = NeighborhoodAttention(config, dim, num_heads, kernel_size, dilation)
        self.output = NeighborhoodAttentionOutput(config, dim)
        self.pruned_heads = set()

    # Copied from transformers.models.nat.modeling_nat.NeighborhoodAttentionModule.prune_heads
    def prune_heads(self, heads):
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads
        )

        # Prune linear layers
        self.self.query = prune_linear_layer(self.self.query, index)
        self.self.key = prune_linear_layer(self.self.key, index)
        self.self.value = prune_linear_layer(self.self.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)

        # Update hyper params and store pruned heads
        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
        self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    # Copied from transformers.models.nat.modeling_nat.NeighborhoodAttentionModule.forward
    def forward(
        self,
        hidden_states: torch.Tensor,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor]:
        self_outputs = self.self(hidden_states, output_attentions)
        attention_output = self.output(self_outputs[0], hidden_states)
        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs


# Copied from transformers.models.nat.modeling_nat.NatIntermediate with Nat->Dinat
class DinatIntermediate(nn.Module):
    # First half of the MLP block: expand by `mlp_ratio` and apply the activation.
    def __init__(self, config, dim):
        super().__init__()
        self.dense = nn.Linear(dim, int(config.mlp_ratio * dim))
        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = ACT2FN[config.hidden_act]
        else:
            self.intermediate_act_fn = config.hidden_act

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.intermediate_act_fn(hidden_states)
        return hidden_states


# Copied from transformers.models.nat.modeling_nat.NatOutput with Nat->Dinat
class DinatOutput(nn.Module):
    # Second half of the MLP block: project back to `dim` and apply dropout.
    def __init__(self, config, dim):
        super().__init__()
        self.dense = nn.Linear(int(config.mlp_ratio * dim), dim)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)
        return hidden_states


class DinatLayer(nn.Module):
    # One transformer layer: pre-norm attention + MLP, each with residual, drop-path
    # and optional layer scale.
    def __init__(self, config, dim, num_heads, dilation, drop_path_rate=0.0):
        super().__init__()
        self.chunk_size_feed_forward = config.chunk_size_feed_forward
        self.kernel_size = config.kernel_size
        self.dilation = dilation
        # Effective receptive extent of the dilated neighborhood; inputs smaller than
        # this are padded in `maybe_pad`.
        self.window_size = self.kernel_size * self.dilation
        self.layernorm_before = nn.LayerNorm(dim, eps=config.layer_norm_eps)
        self.attention = NeighborhoodAttentionModule(
            config, dim, num_heads, kernel_size=self.kernel_size, dilation=self.dilation
        )
        self.drop_path = DinatDropPath(drop_path_rate) if drop_path_rate > 0.0 else nn.Identity()
        self.layernorm_after = nn.LayerNorm(dim, eps=config.layer_norm_eps)
        self.intermediate = DinatIntermediate(config, dim)
        self.output = DinatOutput(config, dim)
        self.layer_scale_parameters = (
            nn.Parameter(config.layer_scale_init_value * torch.ones((2, dim)), requires_grad=True)
            if config.layer_scale_init_value > 0
            else None
        )

    def maybe_pad(self, hidden_states, height, width):
        # Pad bottom/right so both spatial dims reach at least `window_size`.
        window_size = self.window_size
        pad_values = (0, 0, 0, 0, 0, 0)
        if height < window_size or width < window_size:
            pad_l = pad_t = 0
            pad_r = max(0, window_size - width)
            pad_b = max(0, window_size - height)
            pad_values = (0, 0, pad_l, pad_r, pad_t, pad_b)
            hidden_states = nn.functional.pad(hidden_states, pad_values)
        return hidden_states, pad_values

    def forward(
        self,
        hidden_states: torch.Tensor,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor, torch.Tensor]:
        batch_size, height, width, channels = hidden_states.size()
        shortcut = hidden_states

        hidden_states = self.layernorm_before(hidden_states)
        # pad hidden_states if they are smaller than kernel size x dilation
        hidden_states, pad_values = self.maybe_pad(hidden_states, height, width)

        _, height_pad, width_pad, _ = hidden_states.shape

        attention_outputs = self.attention(hidden_states, output_attentions=output_attentions)

        attention_output = attention_outputs[0]

        was_padded = pad_values[3] > 0 or pad_values[5] > 0
        if was_padded:
            # crop back to the original spatial size
            attention_output = attention_output[:, :height, :width, :].contiguous()

        if self.layer_scale_parameters is not None:
            attention_output = self.layer_scale_parameters[0] * attention_output

        hidden_states = shortcut + self.drop_path(attention_output)

        layer_output = self.layernorm_after(hidden_states)
        layer_output = self.output(self.intermediate(layer_output))

        if self.layer_scale_parameters is not None:
            layer_output = self.layer_scale_parameters[1] * layer_output

        layer_output = hidden_states + self.drop_path(layer_output)

        layer_outputs = (layer_output, attention_outputs[1]) if output_attentions else (layer_output,)
        return layer_outputs


class DinatStage(nn.Module):
    # A stack of DinatLayers at one resolution, followed by an optional downsampler.
    def __init__(self, config, dim, depth, num_heads, dilations, drop_path_rate, downsample):
        super().__init__()
        self.config = config
        self.dim = dim
        self.layers = nn.ModuleList(
            [
                DinatLayer(
                    config=config,
                    dim=dim,
                    num_heads=num_heads,
                    dilation=dilations[i],
                    drop_path_rate=drop_path_rate[i],
                )
                for i in range(depth)
            ]
        )

        # patch merging layer
        if downsample is not None:
            self.downsample = downsample(dim=dim, norm_layer=nn.LayerNorm)
        else:
            self.downsample = None

        self.pointing = False

    # Copied from transformers.models.nat.modeling_nat.NatStage.forward
    def forward(
        self,
        hidden_states: torch.Tensor,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor]:
        _, height, width, _ = hidden_states.size()
        for i, layer_module in enumerate(self.layers):
            layer_outputs = layer_module(hidden_states, output_attentions)
            hidden_states = layer_outputs[0]

        hidden_states_before_downsampling = hidden_states
        if self.downsample is not None:
            hidden_states = self.downsample(hidden_states_before_downsampling)

        stage_outputs = (hidden_states, hidden_states_before_downsampling)

        if output_attentions:
            stage_outputs += layer_outputs[1:]
        return stage_outputs


class DinatEncoder(nn.Module):
    # The full hierarchy of stages; channel width doubles at each level.
    def __init__(self, config):
        super().__init__()
        self.num_levels = len(config.depths)
        self.config = config
        # Per-layer stochastic-depth rates, linearly increasing across the whole network.
        dpr = [x.item() for x in torch.linspace(0, config.drop_path_rate, sum(config.depths))]
        self.levels = nn.ModuleList(
            [
                DinatStage(
                    config=config,
                    dim=int(config.embed_dim * 2**i_layer),
                    depth=config.depths[i_layer],
                    num_heads=config.num_heads[i_layer],
                    dilations=config.dilations[i_layer],
                    drop_path_rate=dpr[sum(config.depths[:i_layer]) : sum(config.depths[: i_layer + 1])],
                    downsample=DinatDownsampler if (i_layer < self.num_levels - 1) else None,
                )
                for i_layer in range(self.num_levels)
            ]
        )

    # Copied from transformers.models.nat.modeling_nat.NatEncoder.forward with Nat->Dinat
    def forward(
        self,
        hidden_states: torch.Tensor,
        output_attentions: Optional[bool] = False,
        output_hidden_states: Optional[bool] = False,
        output_hidden_states_before_downsampling: Optional[bool] = False,
        return_dict: Optional[bool] = True,
    ) -> Union[Tuple, DinatEncoderOutput]:
        all_hidden_states = () if output_hidden_states else None
        all_reshaped_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None

        if output_hidden_states:
            # rearrange b h w c -> b c h w
            reshaped_hidden_state = hidden_states.permute(0, 3, 1, 2)
            all_hidden_states += (hidden_states,)
            all_reshaped_hidden_states += (reshaped_hidden_state,)

        for i, layer_module in enumerate(self.levels):
            layer_outputs = layer_module(hidden_states, output_attentions)

            hidden_states = layer_outputs[0]
            hidden_states_before_downsampling = layer_outputs[1]

            if output_hidden_states and output_hidden_states_before_downsampling:
                # rearrange b h w c -> b c h w
                reshaped_hidden_state = hidden_states_before_downsampling.permute(0, 3, 1, 2)
                all_hidden_states += (hidden_states_before_downsampling,)
                all_reshaped_hidden_states += (reshaped_hidden_state,)
            elif output_hidden_states and not output_hidden_states_before_downsampling:
                # rearrange b h w c -> b c h w
                reshaped_hidden_state = hidden_states.permute(0, 3, 1, 2)
                all_hidden_states += (hidden_states,)
                all_reshaped_hidden_states += (reshaped_hidden_state,)

            if output_attentions:
                all_self_attentions += layer_outputs[2:]

        if not return_dict:
            return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if v is not None)

        return DinatEncoderOutput(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
            reshaped_hidden_states=all_reshaped_hidden_states,
        )


class DinatPreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = DinatConfig
    base_model_prefix = "dinat"
    main_input_name = "pixel_values"

    def _init_weights(self, module):
        """Initialize the weights"""
        if isinstance(module, (nn.Linear, nn.Conv2d)):
            # Slightly different from the TF version which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)

    def _set_gradient_checkpointing(self, module: DinatEncoder, value: bool = False) -> None:
        # Gradient checkpointing is not supported for this architecture; intentionally a no-op.
        pass


DINAT_START_DOCSTRING = r"""
    This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) sub-class. Use
    it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and
    behavior.

    Parameters:
        config ([`DinatConfig`]): Model configuration class with all the parameters of the model.
            Initializing with a config file does not load the weights associated with the model, only the
            configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""

DINAT_INPUTS_DOCSTRING = r"""
    Args:
        pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`):
            Pixel values. Pixel values can be obtained using [`AutoImageProcessor`]. See
            [`ViTImageProcessor.__call__`] for details.

        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""


@add_start_docstrings(
    "The bare Dinat Model transformer outputting raw hidden-states without any specific head on top.",
    DINAT_START_DOCSTRING,
)
# Copied from transformers.models.nat.modeling_nat.NatModel with Nat->Dinat, NAT->DINAT
class DinatModel(DinatPreTrainedModel):
    def __init__(self, config, add_pooling_layer=True):
        super().__init__(config)

        requires_backends(self, ["natten"])

        self.config = config
        self.num_levels = len(config.depths)
        self.num_features = int(config.embed_dim * 2 ** (self.num_levels - 1))

        self.embeddings = DinatEmbeddings(config)
        self.encoder = DinatEncoder(config)

        self.layernorm = nn.LayerNorm(self.num_features, eps=config.layer_norm_eps)
        self.pooler = nn.AdaptiveAvgPool1d(1) if add_pooling_layer else None

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.embeddings.patch_embeddings

    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @add_start_docstrings_to_model_forward(DINAT_INPUTS_DOCSTRING)
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=DinatModelOutput,
        config_class=_CONFIG_FOR_DOC,
        modality="vision",
        expected_output=_EXPECTED_OUTPUT_SHAPE,
    )
    def forward(
        self,
        pixel_values: Optional[torch.FloatTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, DinatModelOutput]:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if pixel_values is None:
            raise ValueError("You have to specify pixel_values")

        embedding_output = self.embeddings(pixel_values)

        encoder_outputs = self.encoder(
            embedding_output,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = encoder_outputs[0]
        sequence_output = self.layernorm(sequence_output)

        pooled_output = None
        if self.pooler is not None:
            # flatten the spatial dims to a sequence before average pooling
            pooled_output = self.pooler(sequence_output.flatten(1, 2).transpose(1, 2))
            pooled_output = torch.flatten(pooled_output, 1)

        if not return_dict:
            output = (sequence_output, pooled_output) + encoder_outputs[1:]

            return output

        return DinatModelOutput(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
            reshaped_hidden_states=encoder_outputs.reshaped_hidden_states,
        )


@add_start_docstrings(
    """
    Dinat Model transformer with an image classification head on top (a linear layer on top of the final hidden state
    of the [CLS] token) e.g. for ImageNet.
    """,
    DINAT_START_DOCSTRING,
)
class DinatForImageClassification(DinatPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)

        requires_backends(self, ["natten"])

        self.num_labels = config.num_labels
        self.dinat = DinatModel(config)

        # Classifier head
        self.classifier = (
            nn.Linear(self.dinat.num_features, config.num_labels) if config.num_labels > 0 else nn.Identity()
        )

        # Initialize weights and apply final processing
        self.post_init()

    @add_start_docstrings_to_model_forward(DINAT_INPUTS_DOCSTRING)
    @add_code_sample_docstrings(
        checkpoint=_IMAGE_CLASS_CHECKPOINT,
        output_type=DinatImageClassifierOutput,
        config_class=_CONFIG_FOR_DOC,
        expected_output=_IMAGE_CLASS_EXPECTED_OUTPUT,
    )
    def forward(
        self,
        pixel_values: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, DinatImageClassifierOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the image classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.dinat(
            pixel_values,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        pooled_output = outputs[1]

        logits = self.classifier(pooled_output)

        loss = None
        if labels is not None:
            # Infer the problem type on first use if it was not set in the config.
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output

        return DinatImageClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
            reshaped_hidden_states=outputs.reshaped_hidden_states,
        )


@add_start_docstrings(
    "NAT backbone, to be used with frameworks like DETR and MaskFormer.",
    DINAT_START_DOCSTRING,
)
class DinatBackbone(DinatPreTrainedModel, BackboneMixin):
    def __init__(self, config):
        super().__init__(config)

        requires_backends(self, ["natten"])

        self.stage_names = config.stage_names

        self.embeddings = DinatEmbeddings(config)
        self.encoder = DinatEncoder(config)

        self.out_features = config.out_features if config.out_features is not None else [self.stage_names[-1]]

        num_features = [int(config.embed_dim * 2**i) for i in range(len(config.depths))]
        self.out_feature_channels = {}
        self.out_feature_channels["stem"] = config.embed_dim
        for i, stage in enumerate(self.stage_names[1:]):
            self.out_feature_channels[stage] = num_features[i]

        # Add layer norms to hidden states of out_features
        hidden_states_norms = {}
        for stage, num_channels in zip(self.out_features, self.channels):
            hidden_states_norms[stage] = nn.LayerNorm(num_channels)
        self.hidden_states_norms = nn.ModuleDict(hidden_states_norms)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.embeddings.patch_embeddings

    @property
    def channels(self):
        return [self.out_feature_channels[name] for name in self.out_features]

    @add_start_docstrings_to_model_forward(DINAT_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=BackboneOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        pixel_values: torch.Tensor,
        output_hidden_states: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> BackboneOutput:
        """
        Returns:

        Examples:

        ```python
        >>> from transformers import AutoImageProcessor, AutoBackbone
        >>> import torch
        >>> from PIL import Image
        >>> import requests

        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)

        >>> processor = AutoImageProcessor.from_pretrained("shi-labs/nat-mini-in1k-224")
        >>> model = AutoBackbone.from_pretrained(
        ...     "shi-labs/nat-mini-in1k-224", out_features=["stage1", "stage2", "stage3", "stage4"]
        ... )

        >>> inputs = processor(image, return_tensors="pt")

        >>> outputs = model(**inputs)

        >>> feature_maps = outputs.feature_maps
        >>> list(feature_maps[-1].shape)
        [1, 512, 7, 7]
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions

        embedding_output = self.embeddings(pixel_values)

        outputs = self.encoder(
            embedding_output,
            output_attentions=output_attentions,
            output_hidden_states=True,
            output_hidden_states_before_downsampling=True,
            return_dict=True,
        )

        hidden_states = outputs.reshaped_hidden_states

        feature_maps = ()
        for stage, hidden_state in zip(self.stage_names, hidden_states):
            if stage in self.out_features:
                batch_size, num_channels, height, width = hidden_state.shape
                # Apply the per-stage LayerNorm over channels, which requires a
                # temporary channels-last, flattened view of the feature map.
                hidden_state = hidden_state.permute(0, 2, 3, 1).contiguous()
                hidden_state = hidden_state.view(batch_size, height * width, num_channels)
                hidden_state = self.hidden_states_norms[stage](hidden_state)
                hidden_state = hidden_state.view(batch_size, height, width, num_channels)
                hidden_state = hidden_state.permute(0, 3, 1, 2).contiguous()
                feature_maps += (hidden_state,)

        if not return_dict:
            output = (feature_maps,)
            if output_hidden_states:
                output += (outputs.hidden_states,)
            return output

        return BackboneOutput(
            feature_maps=feature_maps,
            hidden_states=outputs.hidden_states if output_hidden_states else None,
            attentions=outputs.attentions,
        )
27182812/ChatGLM-LLaMA-chinese-insturct
7,032
src/transformers/models/dinat/configuration_dinat.py
# coding=utf-8
# Copyright 2022 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Dilated Neighborhood Attention Transformer model configuration"""

from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

DINAT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "shi-labs/dinat-mini-in1k-224": "https://huggingface.co/shi-labs/dinat-mini-in1k-224/resolve/main/config.json",
    # See all Dinat models at https://huggingface.co/models?filter=dinat
}


class DinatConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`DinatModel`]. It is used to instantiate a Dinat
    model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
    defaults will yield a similar configuration to that of the Dinat
    [shi-labs/dinat-mini-in1k-224](https://huggingface.co/shi-labs/dinat-mini-in1k-224) architecture.

    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.

    Args:
        patch_size (`int`, *optional*, defaults to 4):
            The size (resolution) of each patch. NOTE: Only patch size of 4 is supported at the moment.
        num_channels (`int`, *optional*, defaults to 3):
            The number of input channels.
        embed_dim (`int`, *optional*, defaults to 64):
            Dimensionality of patch embedding.
        depths (`List[int]`, *optional*, defaults to `[3, 4, 6, 5]`):
            Number of layers in each level of the encoder.
        num_heads (`List[int]`, *optional*, defaults to `[2, 4, 8, 16]`):
            Number of attention heads in each layer of the Transformer encoder.
        kernel_size (`int`, *optional*, defaults to 7):
            Neighborhood Attention kernel size.
        dilations (`List[List[int]]`, *optional*, defaults to `[[1, 8, 1], [1, 4, 1, 4], [1, 2, 1, 2, 1, 2], [1, 1, 1, 1, 1]]`):
            Dilation value of each NA layer in the Transformer encoder.
        mlp_ratio (`float`, *optional*, defaults to 3.0):
            Ratio of MLP hidden dimensionality to embedding dimensionality.
        qkv_bias (`bool`, *optional*, defaults to `True`):
            Whether or not a learnable bias should be added to the queries, keys and values.
        hidden_dropout_prob (`float`, *optional*, defaults to 0.0):
            The dropout probability for all fully connected layers in the embeddings and encoder.
        attention_probs_dropout_prob (`float`, *optional*, defaults to 0.0):
            The dropout ratio for the attention probabilities.
        drop_path_rate (`float`, *optional*, defaults to 0.1):
            Stochastic depth rate.
        hidden_act (`str` or `function`, *optional*, defaults to `"gelu"`):
            The non-linear activation function (function or string) in the encoder. If string, `"gelu"`, `"relu"`,
            `"selu"` and `"gelu_new"` are supported.
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        layer_norm_eps (`float`, *optional*, defaults to 1e-05):
            The epsilon used by the layer normalization layers.
        layer_scale_init_value (`float`, *optional*, defaults to 0.0):
            The initial value for the layer scale. Disabled if <=0.
        out_features (`List[str]`, *optional*):
            If used as backbone, list of features to output. Can be any of `"stem"`, `"stage1"`, `"stage2"`, etc.
            (depending on how many stages the model has). Will default to the last stage if unset.

    Example:

    ```python
    >>> from transformers import DinatConfig, DinatModel

    >>> # Initializing a Dinat shi-labs/dinat-mini-in1k-224 style configuration
    >>> configuration = DinatConfig()

    >>> # Initializing a model (with random weights) from the shi-labs/dinat-mini-in1k-224 style configuration
    >>> model = DinatModel(configuration)

    >>> # Accessing the model configuration
    >>> configuration = model.config
    ```"""

    model_type = "dinat"

    # Expose Swin/ViT-style attribute names expected by generic Transformers utilities.
    attribute_map = {
        "num_attention_heads": "num_heads",
        "num_hidden_layers": "num_layers",
    }

    def __init__(
        self,
        patch_size=4,
        num_channels=3,
        embed_dim=64,
        depths=[3, 4, 6, 5],
        num_heads=[2, 4, 8, 16],
        kernel_size=7,
        dilations=[[1, 8, 1], [1, 4, 1, 4], [1, 2, 1, 2, 1, 2], [1, 1, 1, 1, 1]],
        mlp_ratio=3.0,
        qkv_bias=True,
        hidden_dropout_prob=0.0,
        attention_probs_dropout_prob=0.0,
        drop_path_rate=0.1,
        hidden_act="gelu",
        initializer_range=0.02,
        layer_norm_eps=1e-5,
        layer_scale_init_value=0.0,
        out_features=None,
        **kwargs,
    ):
        super().__init__(**kwargs)

        self.patch_size = patch_size
        self.num_channels = num_channels
        self.embed_dim = embed_dim
        self.depths = depths
        self.num_layers = len(depths)
        self.num_heads = num_heads
        self.kernel_size = kernel_size
        self.dilations = dilations
        self.mlp_ratio = mlp_ratio
        self.qkv_bias = qkv_bias
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.drop_path_rate = drop_path_rate
        self.hidden_act = hidden_act
        self.layer_norm_eps = layer_norm_eps
        self.initializer_range = initializer_range
        # we set the hidden_size attribute in order to make Dinat work with VisionEncoderDecoderModel
        # this indicates the channel dimension after the last stage of the model
        self.hidden_size = int(embed_dim * 2 ** (len(depths) - 1))
        self.layer_scale_init_value = layer_scale_init_value
        self.stage_names = ["stem"] + [f"stage{idx}" for idx in range(1, len(depths) + 1)]
        if out_features is not None:
            if not isinstance(out_features, list):
                raise ValueError("out_features should be a list")
            for feature in out_features:
                if feature not in self.stage_names:
                    raise ValueError(
                        f"Feature {feature} is not a valid feature name. Valid names are {self.stage_names}"
                    )
        self.out_features = out_features
27182812/ChatGLM-LLaMA-chinese-insturct
2,316
src/transformers/models/bigbird_pegasus/__init__.py
# Copyright 2021 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available


# Mapping of submodule name -> public names, used by _LazyModule to defer imports
# until a symbol is actually accessed (keeps `import transformers` fast).
_import_structure = {
    "configuration_bigbird_pegasus": [
        "BIGBIRD_PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "BigBirdPegasusConfig",
        "BigBirdPegasusOnnxConfig",
    ],
}

# The modeling classes require torch; register them only when it is installed.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_bigbird_pegasus"] = [
        "BIGBIRD_PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST",
        "BigBirdPegasusForCausalLM",
        "BigBirdPegasusForConditionalGeneration",
        "BigBirdPegasusForQuestionAnswering",
        "BigBirdPegasusForSequenceClassification",
        "BigBirdPegasusModel",
        "BigBirdPegasusPreTrainedModel",
    ]


# Static imports mirror _import_structure so type checkers and IDEs see real symbols.
if TYPE_CHECKING:
    from .configuration_bigbird_pegasus import (
        BIGBIRD_PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP,
        BigBirdPegasusConfig,
        BigBirdPegasusOnnxConfig,
    )

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_bigbird_pegasus import (
            BIGBIRD_PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST,
            BigBirdPegasusForCausalLM,
            BigBirdPegasusForConditionalGeneration,
            BigBirdPegasusForQuestionAnswering,
            BigBirdPegasusForSequenceClassification,
            BigBirdPegasusModel,
            BigBirdPegasusPreTrainedModel,
        )

else:
    import sys

    # At runtime, replace this module with a lazy proxy that imports on attribute access.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
147,190
src/transformers/models/bigbird_pegasus/modeling_bigbird_pegasus.py
# coding=utf-8 # Copyright 2021 Google Research The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch BigBirdPegasus model.""" import copy import math import random from typing import List, Optional, Tuple, Union import numpy as np import torch from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACT2FN from ...modeling_outputs import ( BaseModelOutput, BaseModelOutputWithPastAndCrossAttentions, CausalLMOutputWithCrossAttentions, Seq2SeqLMOutput, Seq2SeqModelOutput, Seq2SeqQuestionAnsweringModelOutput, Seq2SeqSequenceClassifierOutput, ) from ...modeling_utils import PreTrainedModel from ...pytorch_utils import torch_int_div from ...utils import ( add_code_sample_docstrings, add_end_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_bigbird_pegasus import BigBirdPegasusConfig logger = logging.get_logger(__name__) _CHECKPOINT_FOR_DOC = "google/bigbird-pegasus-large-arxiv" _CONFIG_FOR_DOC = "BigBirdPegasusConfig" _EXPECTED_OUTPUT_SHAPE = [1, 7, 1024] BIGBIRD_PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST = [ "google/bigbird-pegasus-large-arxiv", "google/bigbird-pegasus-large-pubmed", "google/bigbird-pegasus-large-bigpatent", # See all BigBirdPegasus models at https://huggingface.co/models?filter=bigbird_pegasus ] def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, 
decoder_start_token_id: int): """ Shift input ids one token to the right. """ shifted_input_ids = input_ids.new_zeros(input_ids.shape) shifted_input_ids[:, 1:] = input_ids[:, :-1].clone() shifted_input_ids[:, 0] = decoder_start_token_id if pad_token_id is None: raise ValueError("self.model.config.pad_token_id has to be defined.") # replace possible -100 values in labels by `pad_token_id` shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id) return shifted_input_ids def _make_causal_mask(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0): """ Make causal mask used for bi-directional self-attention. """ bsz, tgt_len = input_ids_shape mask = torch.full((tgt_len, tgt_len), torch.tensor(torch.finfo(dtype).min)) mask_cond = torch.arange(mask.size(-1)) mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0) mask = mask.to(dtype) if past_key_values_length > 0: mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], dim=-1) return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length) def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None): """ Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. """ bsz, src_len = mask.size() tgt_len = tgt_len if tgt_len is not None else src_len expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype) inverted_mask = 1.0 - expanded_mask return inverted_mask.masked_fill(inverted_mask.bool(), torch.finfo(dtype).min) class BigBirdPegasusLearnedPositionalEmbedding(nn.Embedding): """ This module learns positional embeddings up to a fixed maximum size. 
""" def __init__(self, num_embeddings: int, embedding_dim: int): super().__init__(num_embeddings, embedding_dim) def forward(self, input_ids_shape: torch.Size, past_key_values_length: int = 0): """`input_ids_shape` is expected to be [bsz x seqlen].""" bsz, seq_len = input_ids_shape[:2] positions = torch.arange( past_key_values_length, past_key_values_length + seq_len, dtype=torch.long, device=self.weight.device ) return super().forward(positions) # Copied from transformers.models.big_bird.modeling_big_bird.BigBirdSelfAttention with BigBird->BigBirdPegasus class BigBirdPegasusSelfAttention(nn.Module): def __init__(self, config): super().__init__() if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"): raise ValueError( f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention " f"heads ({config.num_attention_heads})" ) self.num_attention_heads = config.num_attention_heads self.attention_head_size = int(config.hidden_size / config.num_attention_heads) self.all_head_size = self.num_attention_heads * self.attention_head_size self.query = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias) self.key = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias) self.value = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias) self.dropout = nn.Dropout(config.attention_probs_dropout_prob) self.is_decoder = config.is_decoder def transpose_for_scores(self, x): new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) x = x.view(*new_x_shape) return x.permute(0, 2, 1, 3) def forward( self, hidden_states, attention_mask=None, head_mask=None, encoder_hidden_states=None, encoder_attention_mask=None, past_key_value=None, output_attentions=False, ): mixed_query_layer = self.query(hidden_states) # If this is instantiated as a cross-attention module, the keys # and values come from an encoder; the attention mask needs to be # such that 
the encoder's padding tokens are not attended to. is_cross_attention = encoder_hidden_states is not None if is_cross_attention and past_key_value is not None: # reuse k,v, cross_attentions key_layer = past_key_value[0] value_layer = past_key_value[1] attention_mask = encoder_attention_mask elif is_cross_attention: key_layer = self.transpose_for_scores(self.key(encoder_hidden_states)) value_layer = self.transpose_for_scores(self.value(encoder_hidden_states)) attention_mask = encoder_attention_mask elif past_key_value is not None: key_layer = self.transpose_for_scores(self.key(hidden_states)) value_layer = self.transpose_for_scores(self.value(hidden_states)) key_layer = torch.cat([past_key_value[0], key_layer], dim=2) value_layer = torch.cat([past_key_value[1], value_layer], dim=2) else: key_layer = self.transpose_for_scores(self.key(hidden_states)) value_layer = self.transpose_for_scores(self.value(hidden_states)) query_layer = self.transpose_for_scores(mixed_query_layer) if self.is_decoder: # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states. # Further calls to cross_attention layer can then reuse all cross-attention # key/value_states (first "if" case) # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of # all previous decoder key/value_states. Further calls to uni-directional self-attention # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) # if encoder bi-directional self-attention `past_key_value` is always `None` past_key_value = (key_layer, value_layer) # Take the dot product between "query" and "key" to get the raw attention scores. 
attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2)) attention_scores = attention_scores / math.sqrt(self.attention_head_size) if attention_mask is not None: # Apply the attention mask is (precomputed for all layers in BigBirdPegasusModel forward() function) attention_scores = attention_scores + attention_mask # Normalize the attention scores to probabilities. attention_probs = nn.functional.softmax(attention_scores, dim=-1) # This is actually dropping out entire tokens to attend to, which might # seem a bit unusual, but is taken from the original Transformer paper. attention_probs = self.dropout(attention_probs) # Mask heads if we want to if head_mask is not None: attention_probs = attention_probs * head_mask context_layer = torch.matmul(attention_probs, value_layer) context_layer = context_layer.permute(0, 2, 1, 3).contiguous() new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,) context_layer = context_layer.view(*new_context_layer_shape) outputs = (context_layer, attention_probs) if output_attentions else (context_layer,) if self.is_decoder: outputs = outputs + (past_key_value,) return outputs # Copied from transformers.models.big_bird.modeling_big_bird.BigBirdBlockSparseAttention with BigBird->BigBirdPegasus class BigBirdPegasusBlockSparseAttention(nn.Module): def __init__(self, config, seed=None): super().__init__() self.max_seqlen = config.max_position_embeddings self.seed = seed if config.hidden_size % config.num_attention_heads != 0: raise ValueError( f"The hidden size {config.hidden_size} is not a multiple of the number of attention " f"heads {config.num_attention_heads}." 
) self.num_attention_heads = config.num_attention_heads self.num_random_blocks = config.num_random_blocks self.block_size = config.block_size self.attention_head_size = int(config.hidden_size / config.num_attention_heads) self.all_head_size = self.num_attention_heads * self.attention_head_size self.query = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias) self.key = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias) self.value = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias) def transpose_for_scores(self, x): new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) x = x.view(*new_x_shape) return x.permute(0, 2, 1, 3) def forward( self, hidden_states, band_mask=None, from_mask=None, to_mask=None, from_blocked_mask=None, to_blocked_mask=None, output_attentions=None, ): # Currently this `class` can't be used in decoder. batch_size, seqlen, _ = hidden_states.size() to_seq_length = from_seq_length = seqlen from_block_size = to_block_size = self.block_size if from_seq_length % from_block_size != 0: raise ValueError("Query sided sequence length must be multiple of block size") if to_seq_length % to_block_size != 0: raise ValueError("Key/Value sided sequence length must be multiple of block size") query_layer = self.transpose_for_scores(self.query(hidden_states)) key_layer = self.transpose_for_scores(self.key(hidden_states)) value_layer = self.transpose_for_scores(self.value(hidden_states)) context_layer, attention_probs = self.bigbird_block_sparse_attention( query_layer, key_layer, value_layer, band_mask, from_mask, to_mask, from_blocked_mask, to_blocked_mask, self.num_attention_heads, self.num_random_blocks, self.attention_head_size, from_block_size, to_block_size, batch_size, from_seq_length, to_seq_length, seed=self.seed, plan_from_length=None, plan_num_rand_blocks=None, output_attentions=output_attentions, ) context_layer = context_layer.contiguous().view(batch_size, 
from_seq_length, -1) outputs = (context_layer, attention_probs) if output_attentions else (context_layer,) return outputs @staticmethod def torch_bmm_nd(inp_1, inp_2, ndim=None): """Fast nd matrix multiplication""" # faster replacement of torch.einsum ("bhqk,bhkd->bhqd") return torch.bmm(inp_1.reshape((-1,) + inp_1.shape[-2:]), inp_2.reshape((-1,) + inp_2.shape[-2:])).view( inp_1.shape[: ndim - 2] + (inp_1.shape[ndim - 2], inp_2.shape[ndim - 1]) ) @staticmethod def torch_bmm_nd_transpose(inp_1, inp_2, ndim=None): """Fast nd matrix multiplication with transpose""" # faster replacement of torch.einsum (bhqd,bhkd->bhqk) return torch.bmm( inp_1.reshape((-1,) + inp_1.shape[-2:]), inp_2.reshape((-1,) + inp_2.shape[-2:]).transpose(1, 2) ).view(inp_1.shape[: ndim - 2] + (inp_1.shape[ndim - 2], inp_2.shape[ndim - 2])) def bigbird_block_sparse_attention( self, query_layer, key_layer, value_layer, band_mask, from_mask, to_mask, from_blocked_mask, to_blocked_mask, n_heads, n_rand_blocks, attention_head_size, from_block_size, to_block_size, batch_size, from_seq_len, to_seq_len, seed, plan_from_length, plan_num_rand_blocks, output_attentions, ): # BigBirdPegasus block-sparse attention as suggested in paper # ITC: # global tokens: 2 x block_size # window tokens: 3 x block_size # random tokens: num_rand_tokens x block_size # ETC: # global tokens: extra_globals_tokens + 2 x block_size # window tokens: 3 x block_size # random tokens: num_rand_tokens x block_size # Note: # 1) Currently, ETC is not supported. # 2) Window size is fixed to 3 blocks & it can be changed only by # changing `block_size`. # 3) Number of global blocks are fixed (2 blocks here) & global tokens can be # controlled only by `block_size`. # attention is calculated separately for q[0], q[1], q[2:-2], q[-2], q[-1] in order to use special trick of shifting tokens (for calculating sliding attention) # hence following code can be divided into 5 parts. 
if from_seq_len // from_block_size != to_seq_len // to_block_size: raise ValueError("Error the number of blocks needs to be same!") rsqrt_d = 1 / math.sqrt(attention_head_size) bsz = batch_size attn_mask_penalty = -10000.0 # generate random attention and corresponding masks np.random.seed(seed) if from_seq_len in [1024, 3072, 4096]: # old plans used in paper rand_attn = [ self._bigbird_block_rand_mask( self.max_seqlen, self.max_seqlen, from_block_size, to_block_size, n_rand_blocks, last_idx=1024 )[: (from_seq_len // from_block_size - 2)] for _ in range(n_heads) ] else: if plan_from_length is None: plan_from_length, plan_num_rand_blocks = self._get_rand_attn_plan( from_seq_len, from_block_size, n_rand_blocks ) rand_attn = self._bigbird_block_rand_mask_with_head( from_seq_length=from_seq_len, to_seq_length=to_seq_len, from_block_size=from_block_size, to_block_size=to_block_size, num_heads=n_heads, plan_from_length=plan_from_length, plan_num_rand_blocks=plan_num_rand_blocks, ) rand_attn = np.stack(rand_attn, axis=0) rand_attn = torch.tensor(rand_attn, device=query_layer.device, dtype=torch.long) rand_attn.unsqueeze_(0) rand_attn = torch.cat([rand_attn for _ in range(batch_size)], dim=0) rand_mask = self._create_rand_mask_from_inputs( from_blocked_mask, to_blocked_mask, rand_attn, n_heads, n_rand_blocks, bsz, from_seq_len, from_block_size ) blocked_query_matrix = query_layer.view(bsz, n_heads, from_seq_len // from_block_size, from_block_size, -1) blocked_key_matrix = key_layer.view(bsz, n_heads, to_seq_len // to_block_size, to_block_size, -1) blocked_value_matrix = value_layer.view(bsz, n_heads, to_seq_len // to_block_size, to_block_size, -1) # preparing block for randn attn gathered_key = self.torch_gather_b2(blocked_key_matrix, rand_attn) gathered_key = gathered_key.view( bsz, n_heads, to_seq_len // to_block_size - 2, n_rand_blocks * to_block_size, -1 ) # [bsz, n_heads, to_seq_len//to_block_size-2, n_rand_blocks, to_block_size, -1] gathered_value = 
self.torch_gather_b2(blocked_value_matrix, rand_attn) gathered_value = gathered_value.view( bsz, n_heads, to_seq_len // to_block_size - 2, n_rand_blocks * to_block_size, -1 ) # [bsz, n_heads, to_seq_len//to_block_size-2, n_rand_blocks, to_block_size, -1] # 1st PART # 1st block (global block) attention scores # q[0] x (k[0], k[1], k[2], k[3], k[4] .... ) # [bsz, n_heads, from_block_size, -1] x [bsz, n_heads, to_seq_len, -1] ==> [bsz, n_heads, from_block_size, to_seq_len] first_product = self.torch_bmm_nd_transpose(blocked_query_matrix[:, :, 0], key_layer, ndim=4) first_product = first_product * rsqrt_d first_product += (1.0 - to_mask) * attn_mask_penalty first_attn_weights = nn.functional.softmax( first_product, dim=-1 ) # [bsz, n_heads, from_block_size, to_seq_len] # [bsz, n_heads, from_block_size, to_seq_len] x [bsz, n_heads, to_seq_len, -1] ==> [bsz, n_heads, from_block_size, -1] first_context_layer = self.torch_bmm_nd(first_attn_weights, value_layer, ndim=4) first_context_layer.unsqueeze_(2) # 2nd PART # 2nd block attention scores # q[1] x (sliding_keys, random_keys, global_keys) # sliding key blocks -> 2nd, 3rd blocks # global key blocks -> 1st block second_key_mat = torch.cat( [ blocked_key_matrix[:, :, 0], blocked_key_matrix[:, :, 1], blocked_key_matrix[:, :, 2], blocked_key_matrix[:, :, -1], gathered_key[:, :, 0], ], dim=2, ) # [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1] second_value_mat = torch.cat( [ blocked_value_matrix[:, :, 0], blocked_value_matrix[:, :, 1], blocked_value_matrix[:, :, 2], blocked_value_matrix[:, :, -1], gathered_value[:, :, 0], ], dim=2, ) # [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1] # [bsz, n_heads, from_block_size, -1] x [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1] ==> [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size] second_product = self.torch_bmm_nd_transpose(blocked_query_matrix[:, :, 1], second_key_mat, ndim=4) second_seq_pad = torch.cat( [ to_mask[:, :, :, : 3 * to_block_size], to_mask[:, 
:, :, -to_block_size:], to_mask.new_ones([bsz, 1, 1, n_rand_blocks * to_block_size]), ], dim=3, ) second_rand_pad = torch.cat( [ rand_mask.new_ones([bsz, n_heads, from_block_size, 4 * to_block_size]), rand_mask[:, :, 0], ], dim=3, ) second_product = second_product * rsqrt_d second_product += (1.0 - torch.minimum(second_seq_pad, second_rand_pad)) * attn_mask_penalty second_attn_weights = nn.functional.softmax( second_product, dim=-1 ) # [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size] # [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size] x [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1] ==> [bsz, n_heads, from_block_size, -1] second_context_layer = self.torch_bmm_nd(second_attn_weights, second_value_mat, ndim=4) second_context_layer.unsqueeze_(2) # 3rd PART # Middle blocks attention scores # q[-2:2] x (sliding_keys, random_keys, global_keys) # sliding attn is calculated using special trick of shifting tokens as discussed in paper # random keys are generated by taking random indices as per `rand_attn` # global keys -> 1st & last block exp_blocked_key_matrix = torch.cat( [blocked_key_matrix[:, :, 1:-3], blocked_key_matrix[:, :, 2:-2], blocked_key_matrix[:, :, 3:-1]], dim=3 ) # [bsz, n_heads, from_seq_len//from_block_size-4, 3*to_block_size, -1] exp_blocked_value_matrix = torch.cat( [blocked_value_matrix[:, :, 1:-3], blocked_value_matrix[:, :, 2:-2], blocked_value_matrix[:, :, 3:-1]], dim=3, ) # [bsz, n_heads, from_seq_len//from_block_size-4, 3*to_block_size, -1] middle_query_matrix = blocked_query_matrix[:, :, 2:-2] # sliding attention scores for q[-2:2] # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] x [b, n_heads, from_seq_len//from_block_size-4, 3*to_block_size, -1] inner_band_product = self.torch_bmm_nd_transpose(middle_query_matrix, exp_blocked_key_matrix, ndim=5) # ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, 3*to_block_size] inner_band_product = inner_band_product * rsqrt_d # randn 
attention scores for q[-2:2] # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] x [bsz, n_heads, from_seq_len//from_block_size-4, n_rand_blocks*to_block_size, -1] rand_band_product = self.torch_bmm_nd_transpose(middle_query_matrix, gathered_key[:, :, 1:-1], ndim=5) # ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, n_rand_blocks*to_block_size] rand_band_product = rand_band_product * rsqrt_d # Including 1st block (since it's global) first_band_product = torch.einsum( "bhlqd,bhkd->bhlqk", middle_query_matrix, blocked_key_matrix[:, :, 0] ) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] x [bsz, n_heads, to_block_size, -1] ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, to_block_size] first_band_product = first_band_product * rsqrt_d # Including last block (since it's global) last_band_product = torch.einsum( "bhlqd,bhkd->bhlqk", middle_query_matrix, blocked_key_matrix[:, :, -1] ) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] x [bsz, n_heads, to_block_size, -1] ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, to_block_size] last_band_product = last_band_product * rsqrt_d # masking padded tokens inner_band_product += (1.0 - band_mask) * attn_mask_penalty first_band_product += (1.0 - to_mask[:, :, :, :to_block_size].unsqueeze(3)) * attn_mask_penalty last_band_product += (1.0 - to_mask[:, :, :, -to_block_size:].unsqueeze(3)) * attn_mask_penalty rand_band_product += (1.0 - rand_mask[:, :, 1:-1]) * attn_mask_penalty # completing attention scores matrix for all q[-2:2] band_product = torch.cat( [first_band_product, inner_band_product, rand_band_product, last_band_product], dim=-1 ) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, (5+n_rand_blocks)*to_block_size] # safely doing softmax since attention matrix is completed attn_weights = nn.functional.softmax( band_product, dim=-1 ) # [bsz, n_heads, from_seq_len//from_block_size-4, 
from_block_size, (5+n_rand_blocks)*to_block_size] # contribution of sliding keys # [bsz, n_heads, m//from_block_size-4, from_block_size, 3*to_block_size] x [bsz, n_heads, from_seq_len//from_block_size-4, 3*to_block_size, -1] context_layer = self.torch_bmm_nd( attn_weights[:, :, :, :, to_block_size : 4 * to_block_size], exp_blocked_value_matrix, ndim=5 ) # ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] # adding contribution of random keys # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, n_rand_blocks*to_block_size] x [bsz, n_heads, from_seq_len//from_block_size-4, n_rand_blocks*to_block_size, -1] context_layer += self.torch_bmm_nd( attn_weights[:, :, :, :, 4 * to_block_size : -to_block_size], gathered_value[:, :, 1:-1], ndim=5 ) # ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] # adding contribution of global keys context_layer += torch.einsum( "bhlqk,bhkd->bhlqd", attn_weights[:, :, :, :, :to_block_size], blocked_value_matrix[:, :, 0] ) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, to_block_size] x [bsz, n_heads, to_block_size, -1] ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] context_layer += torch.einsum( "bhlqk,bhkd->bhlqd", attn_weights[:, :, :, :, -to_block_size:], blocked_value_matrix[:, :, -1] ) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, to_block_size] x [bsz, n_heads, to_block_size, -1] ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] # 4th PART # last 2nd token attention scores # q[-2] x (sliding_keys, random_keys, global_keys) # sliding key blocks -> last 3 blocks # global key block -> 1st block # random key block -> based on indices stored in `randn_attn` second_last_key_mat = torch.cat( [ blocked_key_matrix[:, :, 0], blocked_key_matrix[:, :, -3], blocked_key_matrix[:, :, -2], blocked_key_matrix[:, :, -1], gathered_key[:, :, -1], ], dim=2, ) # [bsz, n_heads, (4+n_random_blocks)*to_block_size, 
-1] second_last_value_mat = torch.cat( [ blocked_value_matrix[:, :, 0], blocked_value_matrix[:, :, -3], blocked_value_matrix[:, :, -2], blocked_value_matrix[:, :, -1], gathered_value[:, :, -1], ], dim=2, ) # [bsz, n_heads, (4+r)*to_block_size, -1] # [bsz, n_heads, from_block_size, -1] x [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1] ==> [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size] second_last_product = self.torch_bmm_nd_transpose(blocked_query_matrix[:, :, -2], second_last_key_mat, ndim=4) second_last_seq_pad = torch.cat( [ to_mask[:, :, :, :to_block_size], to_mask[:, :, :, -3 * to_block_size :], to_mask.new_ones([bsz, 1, 1, n_rand_blocks * to_block_size]), ], dim=3, ) second_last_rand_pad = torch.cat( [ rand_mask.new_ones([bsz, n_heads, from_block_size, 4 * to_block_size]), rand_mask[:, :, -1], ], dim=3, ) second_last_product = second_last_product * rsqrt_d second_last_product += (1.0 - torch.minimum(second_last_seq_pad, second_last_rand_pad)) * attn_mask_penalty second_last_attn_weights = nn.functional.softmax( second_last_product, dim=-1 ) # [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size] # [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size] x [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1] ==> [bsz, n_heads, from_block_size, -1] second_last_context_layer = self.torch_bmm_nd(second_last_attn_weights, second_last_value_mat, ndim=4) second_last_context_layer.unsqueeze_(2) # 5th PART # last block (global) attention scores # q[-1] x (k[0], k[1], k[2], k[3], .... 
) # [bsz, n_heads, from_block_size, -1] x [bsz, n_heads, to_seq_len, -1] ==> [bsz, n_heads, from_block_size, to_seq_len] last_product = self.torch_bmm_nd_transpose(blocked_query_matrix[:, :, -1], key_layer, ndim=4) last_product = last_product * rsqrt_d last_product += (1.0 - to_mask) * attn_mask_penalty last_attn_weights = nn.functional.softmax(last_product, dim=-1) # [bsz, n_heads, from_block_size, n] # [bsz, n_heads, from_block_size, to_seq_len] x [bsz, n_heads, to_seq_len, -1] ==> [bsz, n_heads, from_block_size, -1] last_context_layer = self.torch_bmm_nd(last_attn_weights, value_layer, ndim=4) last_context_layer.unsqueeze_(2) # combining representations of all tokens context_layer = torch.cat( [first_context_layer, second_context_layer, context_layer, second_last_context_layer, last_context_layer], dim=2, ) context_layer = context_layer.view((bsz, n_heads, from_seq_len, -1)) * from_mask context_layer = torch.transpose(context_layer, 1, 2) # this is just for visualizing; forward pass doesn't depend on following code if output_attentions: # TODO(PVP): need to verify if below code is correct attention_probs = torch.zeros( bsz, n_heads, from_seq_len, to_seq_len, dtype=torch.float, device=context_layer.device ) # 1st query block # corresponding to `first_context_layer` attention_probs[:, :, :from_block_size, :] = first_attn_weights # all keys global # 2nd query block # corresponding to `second_context_layer` attention_probs[:, :, from_block_size : 2 * from_block_size, : 3 * to_block_size] = second_attn_weights[ :, :, :, : 3 * to_block_size ] # 1st three key blocks (global + sliding) attention_probs[:, :, from_block_size : 2 * from_block_size, -to_block_size:] = second_attn_weights[ :, :, :, 3 * to_block_size : 4 * to_block_size ] # last key block (global) # random keys for p1, i1, w1 in zip(range(bsz), rand_attn, second_attn_weights): # p1, i1, w1 corresponds to batch_dim i.e. 
following operation is done for each sequence in batch for p2, i2, w2 in zip(range(n_heads), i1, w1): # p2, i2, w2 corresponds to head_dim i.e. following operation is done for each heads attn_probs_view = attention_probs.view( bsz, n_heads, from_seq_len // from_block_size, from_block_size, to_seq_len // to_block_size, to_block_size, ) right_slice = w2[:, 4 * to_block_size :] attn_probs_view[p1, p2, 1, :, i2[0]] = right_slice.view( from_block_size, n_rand_blocks, to_block_size ) # Middle query blocks # corresponding to `context_layer` # sliding keys for q_idx in range(from_seq_len // from_block_size - 4): attn_probs_view = attention_probs.view( bsz, n_heads, from_seq_len // from_block_size, from_block_size, to_seq_len // to_block_size, to_block_size, )[:, :, 2:-2, :, 1:-1, :] right_slice = attn_weights[:, :, q_idx, :, to_block_size : 4 * to_block_size] attn_probs_view[:, :, q_idx, :, q_idx : q_idx + 3, :] = right_slice.view( bsz, n_heads, from_block_size, 3, to_block_size ) # inner_band_product # global keys (corresponding to 1st key block) attention_probs[:, :, 2 * from_block_size : -2 * from_block_size, :to_block_size] = attn_weights[ :, :, :, :, :to_block_size ].view( bsz, n_heads, -1, to_block_size ) # first_band_product # global keys (corresponding to last key block) attention_probs[:, :, 2 * from_block_size : -2 * from_block_size, -to_block_size:] = attn_weights[ :, :, :, :, -to_block_size: ].view( bsz, n_heads, -1, to_block_size ) # last_band_product # random keys for p1, i1, w1 in zip(range(bsz), rand_attn, attn_weights): # p1, i1, w1 corresponds to batch_dim i.e. following operation is done for each sequence in batch for p2, i2, w2 in zip(range(n_heads), i1, w1): # p2, i2, w2 corresponds to head_dim i.e. 
following operation is done for each heads for q_idx in range(1, len(i2) - 1): attn_probs_view = attention_probs.view( bsz, n_heads, from_seq_len // from_block_size, from_block_size, to_seq_len // to_block_size, to_block_size, ) right_slice = w2[q_idx - 1, :, 4 * to_block_size : -to_block_size] attn_probs_view[p1, p2, q_idx + 1, :, i2[q_idx]] = right_slice.view( from_block_size, n_rand_blocks, to_block_size ) # Second-last query block # corresponding to `second_last_context_layer` attention_probs[:, :, -2 * from_block_size : -from_block_size, :to_block_size] = second_last_attn_weights[ :, :, :, :to_block_size ] # 1st key block (global) attention_probs[ :, :, -2 * from_block_size : -from_block_size, -3 * to_block_size : ] = second_last_attn_weights[ :, :, :, to_block_size : 4 * to_block_size ] # last three blocks (global + sliding) # random keys for p1, i1, w1 in zip(range(bsz), rand_attn, second_last_attn_weights): # p1, i1, w1 corresponds to batch_dim i.e. following operation is done for each sequence in batch for p2, i2, w2 in zip(range(n_heads), i1, w1): # p2, i2, w2 corresponds to head_dim i.e. following operation is done for each heads attn_probs_view = attention_probs.view( bsz, n_heads, from_seq_len // from_block_size, from_block_size, to_seq_len // to_block_size, to_block_size, ) right_slice = w2[:, 4 * to_block_size :] attn_probs_view[p1, p2, -2, :, i2[-1]] = right_slice.view( from_block_size, n_rand_blocks, to_block_size ) # last query block # corresponding to `last_context_layer` attention_probs[:, :, -from_block_size:, :] = last_attn_weights # all keys global else: attention_probs = None return context_layer, attention_probs @staticmethod def torch_gather_b2(params, indices): # this operation is equivalent to tf.gather when batch_dims=2 if params.shape[:2] != indices.shape[:2]: raise ValueError( "Make sure that the first two dimensions of params and indices are identical, but" f" they are params: {params.shape[:2]} vs. 
indices: {indices.shape[:2]}" ) num_indices_to_gather = indices.shape[-2] * indices.shape[-1] num_indices_to_pick_from = params.shape[2] shift = torch.arange(indices.shape[0] * indices.shape[1] * num_indices_to_gather, device=indices.device) indices_shift = torch_int_div(shift, num_indices_to_gather) * num_indices_to_pick_from flattened_indices = indices.view(-1) + indices_shift flattened_params = params.reshape(-1, params.shape[-2], params.shape[-1]) out_flattened = flattened_params.index_select(0, flattened_indices) out = out_flattened.reshape(params.shape[:2] + (num_indices_to_gather,) + params.shape[3:]) return out @staticmethod def _create_rand_mask_from_inputs( from_blocked_mask, to_blocked_mask, rand_attn, num_attention_heads, num_rand_blocks, batch_size, from_seq_length, from_block_size, ): """ Create 3D attention mask from a 2D tensor mask. Args: from_blocked_mask: 2D Tensor of shape [batch_size, from_seq_length//from_block_size, from_block_size]. to_blocked_mask: int32 Tensor of shape [batch_size, to_seq_length//to_block_size, to_block_size]. rand_attn: [batch_size, num_attention_heads, from_seq_length//from_block_size-2, num_rand_blocks] num_attention_heads: int. Number of attention heads. num_rand_blocks: int. Number of random chunks per row. batch_size: int. Batch size for computation. from_seq_length: int. length of from sequence. from_block_size: int. size of block in from sequence. Returns: float Tensor of shape [batch_size, num_attention_heads, from_seq_length//from_block_size-2, from_block_size, num_rand_blocks*to_block_size]. 
""" num_windows = from_seq_length // from_block_size - 2 rand_mask = torch.stack([p1[i1.flatten()] for p1, i1 in zip(to_blocked_mask, rand_attn)]) rand_mask = rand_mask.view(batch_size, num_attention_heads, num_windows, num_rand_blocks * from_block_size) rand_mask = torch.einsum("blq,bhlk->bhlqk", from_blocked_mask[:, 1:-1], rand_mask) return rand_mask @staticmethod def _get_rand_attn_plan(from_seq_length, from_block_size, num_rand_blocks): """ Gives the plan of where to put random attention. Args: from_seq_length: int. length of from sequence. from_block_size: int. size of block in from sequence. num_rand_blocks: int. Number of random chunks per row. Returns: plan_from_length: ending location of from block plan_num_rand_blocks: number of random ending location for each block """ plan_from_length = [] plan_num_rand_blocks = [] if (2 * num_rand_blocks + 5) < (from_seq_length // from_block_size): plan_from_length.append(int((2 * num_rand_blocks + 5) * from_block_size)) plan_num_rand_blocks.append(num_rand_blocks) plan_from_length.append(from_seq_length) plan_num_rand_blocks.append(0) elif (num_rand_blocks + 5) < (from_seq_length // from_block_size): plan_from_length.append(int((num_rand_blocks + 5) * from_block_size)) plan_num_rand_blocks.append(num_rand_blocks // 2) plan_from_length.append(from_seq_length) plan_num_rand_blocks.append(num_rand_blocks - (num_rand_blocks // 2)) else: plan_from_length.append(from_seq_length) plan_num_rand_blocks.append(num_rand_blocks) return plan_from_length, plan_num_rand_blocks @staticmethod def _bigbird_block_rand_mask( from_seq_length, to_seq_length, from_block_size, to_block_size, num_rand_blocks, last_idx=-1 ): """ Create adjacency list of random attention. Args: from_seq_length: int. length of from sequence. to_seq_length: int. length of to sequence. from_block_size: int. size of block in from sequence. to_block_size: int. size of block in to sequence. num_rand_blocks: int. Number of random chunks per row. 
            last_idx: if -1 then num_rand_blocks blocks chosen anywhere in to sequence, if positive then
                num_rand_blocks blocks chosen only up to last_idx.

        Returns:
            adjacency list of size from_seq_length//from_block_size-2 by num_rand_blocks
        """
        # using this method when from_seq_length in [1024, 3072, 4096]

        if from_seq_length // from_block_size != to_seq_length // to_block_size:
            raise ValueError("Error the number of blocks needs to be same!")

        # One row of random block indices per non-global from-block (first/last excluded).
        rand_attn = np.zeros((from_seq_length // from_block_size - 2, num_rand_blocks), dtype=np.int32)
        # Candidate to-blocks: everything except the first and last (global) blocks.
        middle_seq = np.arange(1, to_seq_length // to_block_size - 1, dtype=np.int32)
        last = to_seq_length // to_block_size - 1
        if last_idx > (2 * to_block_size):
            last = (last_idx // to_block_size) - 1

        r = num_rand_blocks  # shorthand
        for i in range(1, from_seq_length // from_block_size - 1):
            # [start, end] delimits the candidate indices excluded because they fall inside
            # the sliding window around block i.
            start = i - 2
            end = i
            if i == 1:
                rand_attn[i - 1, :] = np.random.permutation(middle_seq[2:last])[:r]
            elif i == 2:
                rand_attn[i - 1, :] = np.random.permutation(middle_seq[3:last])[:r]
            elif i == from_seq_length // from_block_size - 3:
                rand_attn[i - 1, :] = np.random.permutation(middle_seq[:last])[:r]
                # Missing -3: should have been sliced till last-3
            elif i == from_seq_length // from_block_size - 2:
                rand_attn[i - 1, :] = np.random.permutation(middle_seq[:last])[:r]
                # Missing -4: should have been sliced till last-4
            else:
                if start > last:
                    start = last
                    rand_attn[i - 1, :] = np.random.permutation(middle_seq[:start])[:r]
                elif (end + 1) == last:
                    rand_attn[i - 1, :] = np.random.permutation(middle_seq[:start])[:r]
                else:
                    # General case: sample from candidates outside the window [start, end].
                    rand_attn[i - 1, :] = np.random.permutation(
                        np.concatenate((middle_seq[:start], middle_seq[end + 1 : last]))
                    )[:r]
        return rand_attn

    def _bigbird_block_rand_mask_with_head(
        self,
        from_seq_length,
        to_seq_length,
        from_block_size,
        to_block_size,
        num_heads,
        plan_from_length,
        plan_num_rand_blocks,
        window_block_left=1,
        window_block_right=1,
        global_block_top=1,
        global_block_bottom=1,
        global_block_left=1,
        global_block_right=1,
    ):
        """
        Create adjacency list of random
attention.

        Args:
            from_seq_length: int. length of from sequence.
            to_seq_length: int. length of to sequence.
            from_block_size: int. size of block in from sequence.
            to_block_size: int. size of block in to sequence.
            num_heads: int. total number of heads.
            plan_from_length: list. plan from length where num_random_blocks are chosen from.
            plan_num_rand_blocks: list. number of rand blocks within the plan.
            window_block_left: int. number of blocks of window to left of a block.
            window_block_right: int. number of blocks of window to right of a block.
            global_block_top: int. number of blocks at the top.
            global_block_bottom: int. number of blocks at the bottom.
            global_block_left: int. Number of blocks globally used to the left.
            global_block_right: int. Number of blocks globally used to the right.

        Returns:
            adjacency list of size num_heads where each element is of size
            from_seq_length//from_block_size-2 by num_rand_blocks
        """
        # using this method when from_seq_length not in [1024, 3072, 4096]

        if from_seq_length // from_block_size != to_seq_length // to_block_size:
            raise ValueError("Error the number of blocks needs to be same!")

        if from_seq_length not in plan_from_length:
            raise ValueError("Error from sequence length not in plan!")

        # Total number of blocks in the mask
        num_blocks = from_seq_length // from_block_size
        # Number of blocks per plan
        plan_block_length = np.array(plan_from_length) // from_block_size
        # till when to follow plan
        max_plan_idx = plan_from_length.index(from_seq_length)

        # Random Attention adjacency list: one (num_blocks, total_rand_blocks) array per head.
        rand_attn = [
            np.zeros((num_blocks, np.sum(plan_num_rand_blocks[: max_plan_idx + 1])), dtype=np.int32)
            for i in range(num_heads)
        ]

        # We will go iteratively over the plan blocks and pick random number of
        # Attention blocks from the legally allowed blocks
        for plan_idx in range(max_plan_idx + 1):
            rnd_r_cnt = 0
            if plan_idx > 0:
                # set the row for all from_blocks starting from 0 to
                # plan_block_length[plan_idx-1]
                # column index starts from plan_block_length[plan_idx-1] and
ends at # plan_block_length[plan_idx] if plan_num_rand_blocks[plan_idx] > 0: rnd_r_cnt = int(np.sum(plan_num_rand_blocks[:plan_idx])) curr_r_cnt = int(np.sum(plan_num_rand_blocks[: plan_idx + 1])) for blk_rw_idx in range(global_block_top, plan_block_length[plan_idx - 1]): for h in range(num_heads): rand_attn[h][blk_rw_idx, rnd_r_cnt:curr_r_cnt] = self._get_single_block_row_attention( block_id=blk_rw_idx, to_start_block_id=plan_block_length[plan_idx - 1], to_end_block_id=plan_block_length[plan_idx], num_rand_blocks=plan_num_rand_blocks[plan_idx], window_block_left=window_block_left, window_block_right=window_block_right, global_block_left=global_block_left, global_block_right=global_block_right, ) for pl_id in range(plan_idx): if plan_num_rand_blocks[pl_id] == 0: continue for blk_rw_idx in range(plan_block_length[plan_idx - 1], plan_block_length[plan_idx]): rnd_r_cnt = 0 to_start_block_id = 0 if pl_id > 0: rnd_r_cnt = int(np.sum(plan_num_rand_blocks[:pl_id])) to_start_block_id = plan_block_length[pl_id - 1] curr_r_cnt = int(np.sum(plan_num_rand_blocks[: pl_id + 1])) for h in range(num_heads): rand_attn[h][blk_rw_idx, rnd_r_cnt:curr_r_cnt] = self._get_single_block_row_attention( block_id=blk_rw_idx, to_start_block_id=to_start_block_id, to_end_block_id=plan_block_length[pl_id], num_rand_blocks=plan_num_rand_blocks[pl_id], window_block_left=window_block_left, window_block_right=window_block_right, global_block_left=global_block_left, global_block_right=global_block_right, ) if plan_num_rand_blocks[plan_idx] == 0: continue curr_r_cnt = int(np.sum(plan_num_rand_blocks[: plan_idx + 1])) from_start_block_id = global_block_top to_start_block_id = 0 if plan_idx > 0: rnd_r_cnt = int(np.sum(plan_num_rand_blocks[:plan_idx])) from_start_block_id = plan_block_length[plan_idx - 1] to_start_block_id = plan_block_length[plan_idx - 1] for blk_rw_idx in range(from_start_block_id, plan_block_length[plan_idx]): for h in range(num_heads): rand_attn[h][blk_rw_idx, rnd_r_cnt:curr_r_cnt] = 
self._get_single_block_row_attention( block_id=blk_rw_idx, to_start_block_id=to_start_block_id, to_end_block_id=plan_block_length[plan_idx], num_rand_blocks=plan_num_rand_blocks[plan_idx], window_block_left=window_block_left, window_block_right=window_block_right, global_block_left=global_block_left, global_block_right=global_block_right, ) for nh in range(num_heads): rand_attn[nh] = rand_attn[nh][global_block_top : num_blocks - global_block_bottom, :] return rand_attn @staticmethod def _get_single_block_row_attention( block_id, to_start_block_id, to_end_block_id, num_rand_blocks, window_block_left=1, window_block_right=1, global_block_left=1, global_block_right=1, ): """ For a single row block get random row attention. Args: block_id: int. block id of row. to_start_block_id: int. random attention column start id. to_end_block_id: int. random attention column end id. num_rand_blocks: int. number of random blocks to be selected. window_block_left: int. number of blocks of window to left of a block. window_block_right: int. number of blocks of window to right of a block. global_block_left: int. Number of blocks globally used to the left. global_block_right: int. Number of blocks globally used to the right. Returns: row containing the random attention vector of size num_rand_blocks. 
""" # list of to_blocks from which to choose random attention to_block_list = np.arange(to_start_block_id, to_end_block_id, dtype=np.int32) # permute the blocks perm_block = np.random.permutation(to_block_list) # illegal blocks for the current block id, using window illegal_blocks = list(range(block_id - window_block_left, block_id + window_block_right + 1)) # Add blocks at the start and at the end illegal_blocks.extend(list(range(global_block_left))) illegal_blocks.extend(list(range(to_end_block_id - global_block_right, to_end_block_id))) # The second from_block cannot choose random attention on second last to_block if block_id == 1: illegal_blocks.append(to_end_block_id - 2) # The second last from_block cannot choose random attention on second to_block if block_id == to_end_block_id - 2: illegal_blocks.append(1) selected_random_blokcs = [] for i in range(to_end_block_id - to_start_block_id): if perm_block[i] not in illegal_blocks: selected_random_blokcs.append(perm_block[i]) if len(selected_random_blokcs) == num_rand_blocks: break return np.array(selected_random_blokcs, dtype=np.int32) class BigBirdPegasusEncoderAttention(nn.Module): def __init__(self, config, seed=None): super().__init__() self.config = config self.seed = seed self.attention_type = config.attention_type if self.attention_type == "original_full": self.self = BigBirdPegasusSelfAttention(config) elif self.attention_type == "block_sparse": self.self = BigBirdPegasusBlockSparseAttention(config, seed) else: raise ValueError( f"attention_type can either be original_full or block_sparse, but is {self.config.attention_type}" ) self.output = nn.Linear(config.hidden_size, config.hidden_size, bias=config.use_bias) def set_attention_type(self, value: str): if value not in ["original_full", "block_sparse"]: raise ValueError( f"attention_type can only be set to either 'original_full' or 'block_sparse', but is {value}" ) # attention type is already correctly set if value == self.attention_type: return 
self.attention_type = value if value == "original_full": # copy all weights to new full attention class attn_weights = BigBirdPegasusSelfAttention(self.config) else: # copy all weights to new sparse attention class attn_weights = BigBirdPegasusBlockSparseAttention(self.config, self.seed) attn_weights.query = self.self.query attn_weights.value = self.self.value attn_weights.key = self.self.key self.self = attn_weights self.attention_type = value if not self.training: self.self.eval() def forward( self, hidden_states, attention_mask=None, head_mask=None, past_key_value=None, output_attentions=False, band_mask=None, from_mask=None, to_mask=None, from_blocked_mask=None, to_blocked_mask=None, ): # Expand dims to enable multiplication in the self-attention module head_mask = head_mask.reshape(1, -1, 1, 1) if head_mask is not None else None if self.config.attention_type == "original_full": self_outputs = self.self( hidden_states, attention_mask, head_mask, past_key_value=past_key_value, output_attentions=output_attentions, ) else: self_outputs = self.self( hidden_states, band_mask, from_mask, to_mask, from_blocked_mask, to_blocked_mask, output_attentions ) attention_output = self.output(self_outputs[0]) outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them return outputs # Copied from transformers.models.bart.modeling_bart.BartAttention with Bart->BigBirdPegasusDecoder class BigBirdPegasusDecoderAttention(nn.Module): """Multi-headed attention from 'Attention Is All You Need' paper""" def __init__( self, embed_dim: int, num_heads: int, dropout: float = 0.0, is_decoder: bool = False, bias: bool = True, ): super().__init__() self.embed_dim = embed_dim self.num_heads = num_heads self.dropout = dropout self.head_dim = embed_dim // num_heads if (self.head_dim * num_heads) != self.embed_dim: raise ValueError( f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}" f" and `num_heads`: {num_heads})." 
) self.scaling = self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias) def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous() def forward( self, hidden_states: torch.Tensor, key_value_states: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: """Input shape: Batch x Time x Channel""" # if key_value_states are provided this layer is used as a cross-attention layer # for the decoder is_cross_attention = key_value_states is not None bsz, tgt_len, _ = hidden_states.size() # get query proj query_states = self.q_proj(hidden_states) * self.scaling # get key, value proj # `past_key_value[0].shape[2] == key_value_states.shape[1]` # is checking that the `sequence_length` of the `past_key_value` is the same as # the provided `key_value_states` to support prefix tuning if ( is_cross_attention and past_key_value is not None and past_key_value[0].shape[2] == key_value_states.shape[1] ): # reuse k,v, cross_attentions key_states = past_key_value[0] value_states = past_key_value[1] elif is_cross_attention: # cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1, bsz) value_states = self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value is not None: # reuse k, v, self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) key_states = torch.cat([past_key_value[0], key_states], dim=2) value_states = 
torch.cat([past_key_value[1], value_states], dim=2) else: # self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) if self.is_decoder: # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states. # Further calls to cross_attention layer can then reuse all cross-attention # key/value_states (first "if" case) # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of # all previous decoder key/value_states. Further calls to uni-directional self-attention # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) # if encoder bi-directional self-attention `past_key_value` is always `None` past_key_value = (key_states, value_states) proj_shape = (bsz * self.num_heads, -1, self.head_dim) query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape) key_states = key_states.reshape(*proj_shape) value_states = value_states.reshape(*proj_shape) src_len = key_states.size(1) attn_weights = torch.bmm(query_states, key_states.transpose(1, 2)) if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len): raise ValueError( f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is" f" {attn_weights.size()}" ) if attention_mask is not None: if attention_mask.size() != (bsz, 1, tgt_len, src_len): raise ValueError( f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}" ) attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) attn_weights = nn.functional.softmax(attn_weights, dim=-1) if layer_head_mask is not None: if layer_head_mask.size() != (self.num_heads,): raise ValueError( f"Head mask for a single layer should be of size {(self.num_heads,)}, but is" f" {layer_head_mask.size()}" ) 
attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len) attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) if output_attentions: # this operation is a bit awkward, but it's required to # make sure that attn_weights keeps its gradient. # In order to do so, attn_weights have to be reshaped # twice and have to be reused in the following attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len) else: attn_weights_reshaped = None attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training) attn_output = torch.bmm(attn_probs, value_states) if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim): raise ValueError( f"`attn_output` should be of size {(bsz * self.num_heads, tgt_len, self.head_dim)}, but is" f" {attn_output.size()}" ) attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim) attn_output = attn_output.transpose(1, 2) # Use the `embed_dim` from the config (stored in the class) rather than `hidden_state` because `attn_output` can be # partitioned across GPUs when using tensor-parallelism. 
attn_output = attn_output.reshape(bsz, tgt_len, self.embed_dim) attn_output = self.out_proj(attn_output) return attn_output, attn_weights_reshaped, past_key_value class BigBirdPegasusEncoderLayer(nn.Module): def __init__(self, config: BigBirdPegasusConfig, seed=None): super().__init__() self.attention_type = config.attention_type self.embed_dim = config.d_model self.self_attn = BigBirdPegasusEncoderAttention(config, seed=seed) self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim) self.dropout = config.dropout self.activation_fn = ACT2FN[config.activation_function] self.activation_dropout = config.activation_dropout self.fc1 = nn.Linear(self.embed_dim, config.encoder_ffn_dim) self.fc2 = nn.Linear(config.encoder_ffn_dim, self.embed_dim) self.final_layer_norm = nn.LayerNorm(self.embed_dim) def forward( self, hidden_states: torch.Tensor, attention_mask: torch.Tensor, layer_head_mask: torch.Tensor, band_mask=None, from_mask=None, to_mask=None, from_blocked_mask=None, to_blocked_mask=None, output_attentions: bool = False, ): """ Args: hidden_states (`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (`torch.FloatTensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. 
""" residual = hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) self_attention_outputs = self.self_attn( hidden_states=hidden_states, attention_mask=attention_mask, head_mask=layer_head_mask, output_attentions=output_attentions, band_mask=band_mask, from_mask=from_mask, to_mask=to_mask, from_blocked_mask=from_blocked_mask, to_blocked_mask=to_blocked_mask, ) hidden_states = self_attention_outputs[0] hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states residual = hidden_states hidden_states = self.final_layer_norm(hidden_states) hidden_states = self.activation_fn(self.fc1(hidden_states)) hidden_states = self.fc2(hidden_states) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states if hidden_states.dtype == torch.float16 and ( torch.isinf(hidden_states).any() or torch.isnan(hidden_states).any() ): clamp_value = torch.finfo(hidden_states.dtype).max - 1000 hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value) outputs = (hidden_states,) if output_attentions: outputs += (self_attention_outputs[1],) return outputs def set_attention_type(self, value: str): if value not in ["original_full", "block_sparse"]: raise ValueError( f"attention_type can only be set to either 'original_full' or 'block_sparse', but is {value}" ) # attention type is already correctly set if value == self.attention_type: return self.attention_type = value self.self_attn.set_attention_type(value) class BigBirdPegasusDecoderLayer(nn.Module): def __init__(self, config: BigBirdPegasusConfig): super().__init__() self.embed_dim = config.d_model self.self_attn = BigBirdPegasusDecoderAttention( embed_dim=self.embed_dim, num_heads=config.decoder_attention_heads, dropout=config.attention_dropout, is_decoder=True, bias=config.use_bias, ) self.dropout = config.dropout self.activation_fn = 
ACT2FN[config.activation_function] self.activation_dropout = config.activation_dropout self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim) self.encoder_attn = BigBirdPegasusDecoderAttention( self.embed_dim, config.decoder_attention_heads, dropout=config.attention_dropout, is_decoder=True, bias=config.use_bias, ) self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim) self.fc1 = nn.Linear(self.embed_dim, config.decoder_ffn_dim) self.fc2 = nn.Linear(config.decoder_ffn_dim, self.embed_dim) self.final_layer_norm = nn.LayerNorm(self.embed_dim) # Copied from transformers.models.mbart.modeling_mbart.MBartDecoderLayer.forward def forward( self, hidden_states: torch.Tensor, attention_mask: Optional[torch.Tensor] = None, encoder_hidden_states: Optional[torch.Tensor] = None, encoder_attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, cross_attn_layer_head_mask: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, output_attentions: Optional[bool] = False, use_cache: Optional[bool] = True, ) -> torch.Tensor: """ Args: hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` attention_mask (`torch.FloatTensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. encoder_hidden_states (`torch.FloatTensor`): cross attention input to the layer of shape `(batch, seq_len, embed_dim)` encoder_attention_mask (`torch.FloatTensor`): encoder attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (`torch.FloatTensor`): mask for attention heads in a given layer of size `(encoder_attention_heads,)`. cross_attn_layer_head_mask (`torch.FloatTensor`): mask for cross-attention heads in a given layer of size `(decoder_attention_heads,)`. 
past_key_value (`Tuple(torch.FloatTensor)`): cached past key and value projection states output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. """ residual = hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) # Self Attention # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None # add present self-attn cache to positions 1,2 of present_key_value tuple hidden_states, self_attn_weights, present_key_value = self.self_attn( hidden_states=hidden_states, past_key_value=self_attn_past_key_value, attention_mask=attention_mask, layer_head_mask=layer_head_mask, output_attentions=output_attentions, ) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states # Cross-Attention Block cross_attn_present_key_value = None cross_attn_weights = None if encoder_hidden_states is not None: residual = hidden_states hidden_states = self.encoder_attn_layer_norm(hidden_states) # cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn( hidden_states=hidden_states, key_value_states=encoder_hidden_states, attention_mask=encoder_attention_mask, layer_head_mask=cross_attn_layer_head_mask, past_key_value=cross_attn_past_key_value, output_attentions=output_attentions, ) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states # add cross-attn to positions 3,4 of present_key_value tuple present_key_value = present_key_value + cross_attn_present_key_value # Fully Connected residual = hidden_states 
        hidden_states = self.final_layer_norm(hidden_states)
        hidden_states = self.activation_fn(self.fc1(hidden_states))
        hidden_states = nn.functional.dropout(hidden_states, p=self.activation_dropout, training=self.training)
        hidden_states = self.fc2(hidden_states)
        hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
        hidden_states = residual + hidden_states

        outputs = (hidden_states,)

        if output_attentions:
            outputs += (self_attn_weights, cross_attn_weights)

        if use_cache:
            outputs += (present_key_value,)

        return outputs


# Copied from transformers.models.bart.modeling_bart.BartClassificationHead with Bart->BigBirdPegasus
class BigBirdPegasusClassificationHead(nn.Module):
    """Head for sentence-level classification tasks."""

    def __init__(
        self,
        input_dim: int,
        inner_dim: int,
        num_classes: int,
        pooler_dropout: float,
    ):
        super().__init__()
        self.dense = nn.Linear(input_dim, inner_dim)
        self.dropout = nn.Dropout(p=pooler_dropout)
        self.out_proj = nn.Linear(inner_dim, num_classes)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        # dropout -> dense -> tanh -> dropout -> projection to num_classes logits
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.dense(hidden_states)
        hidden_states = torch.tanh(hidden_states)
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.out_proj(hidden_states)
        return hidden_states


class BigBirdPegasusPreTrainedModel(PreTrainedModel):
    config_class = BigBirdPegasusConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["BigBirdPegasusEncoderLayer", "BigBirdPegasusDecoderLayer"]

    def _init_weights(self, module):
        # Normal(0, config.init_std) initialization for Linear/Embedding weights; zero
        # biases and zero the padding embedding row.
        std = self.config.init_std
        if isinstance(module, nn.Linear):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()

    def _set_gradient_checkpointing(self, module, value=False):
        if isinstance(module,
(BigBirdPegasusDecoder, BigBirdPegasusEncoder)): module.gradient_checkpointing = value @property def dummy_inputs(self): pad_token = self.config.pad_token_id input_ids = torch.tensor([[0, 6, 10, 4, 2], [0, 8, 12, 2, pad_token]], device=self.device) dummy_inputs = { "attention_mask": input_ids.ne(pad_token), "input_ids": input_ids, } return dummy_inputs BIGBIRD_PEGASUS_START_DOCSTRING = r""" This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`BigBirdPegasusConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ BIGBIRD_PEGASUS_GENERATION_EXAMPLE = r""" Summarization example: ```python >>> from transformers import AutoTokenizer, BigBirdPegasusForConditionalGeneration >>> model = BigBirdPegasusForConditionalGeneration.from_pretrained("google/bigbird-pegasus-large-arxiv") >>> tokenizer = AutoTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv") >>> ARTICLE_TO_SUMMARIZE = ( ... "The dominant sequence transduction models are based on complex recurrent or convolutional neural " ... "networks in an encoder-decoder configuration. The best performing models also connect the encoder " ... "and decoder through an attention mechanism. We propose a new simple network architecture, the Transformer, " ... "based solely on attention mechanisms, dispensing with recurrence and convolutions entirely. " ... 
"Experiments on two machine translation tasks show these models to be superior in quality " ... "while being more parallelizable and requiring significantly less time to train." ... ) >>> inputs = tokenizer([ARTICLE_TO_SUMMARIZE], max_length=4096, return_tensors="pt", truncation=True) >>> # Generate Summary >>> summary_ids = model.generate(inputs["input_ids"], num_beams=4, max_length=15) >>> tokenizer.batch_decode(summary_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] 'dominant sequence models are based on recurrent or convolutional neural networks .' ``` """ BIGBIRD_PEGASUS_INPUTS_DOCSTRING = r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) decoder_input_ids (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*): Provide for translation and summarization training. By default, the model will create this tensor by shifting the `input_ids` to the right, following the paper. decoder_attention_mask (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*): Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also be used by default. If you want to change padding behavior, you should read [`modeling_bigbird_pegasus._prepare_decoder_attention_mask`] and modify to your needs. 
See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more information on the default strategy. decoder_head_mask (`torch.Tensor` of shape `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. encoder_outputs (`tuple(tuple(torch.FloatTensor)`, *optional*): Tuple consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*: `attentions`) `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)`, *optional*) is a sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. 
decoder_inputs_embeds (`torch.FloatTensor` of shape `(batch_size, target_sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `decoder_input_ids` you can choose to directly pass an embedded representation. If `past_key_values` is used, optionally only the last `decoder_inputs_embeds` have to be input (see `past_key_values`). This is useful if you want more control over how to convert `decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix. If `decoder_input_ids` and `decoder_inputs_embeds` are both unset, `decoder_inputs_embeds` takes the value of `inputs_embeds`. use_cache (`bool`, *optional*): If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see `past_key_values`). output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ BIGBIRD_PEGASUS_STANDALONE_INPUTS_DOCSTRING = r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`ProphetNetTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. 
[What are attention masks?](../glossary#attention-mask) output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ class BigBirdPegasusEncoder(BigBirdPegasusPreTrainedModel): """ Transformer encoder consisting of *config.encoder_layers* self attention layers. Each layer is a [`BigBirdPegasusEncoderLayer`]. Args: config: BigBirdPegasusConfig embed_tokens (nn.Embedding): output embedding """ def __init__(self, config: BigBirdPegasusConfig, embed_tokens: Optional[nn.Embedding] = None): super().__init__(config) self.attention_type = config.attention_type self.block_size = config.block_size self.dropout = config.dropout self.layerdrop = config.encoder_layerdrop embed_dim = config.d_model self.padding_idx = config.pad_token_id self.max_source_positions = config.max_position_embeddings self.embed_scale = math.sqrt(embed_dim) if config.scale_embedding else 1.0 self.embed_tokens = nn.Embedding(config.vocab_size, embed_dim, self.padding_idx) if embed_tokens is not None: self.embed_tokens.weight = embed_tokens.weight self.embed_positions = BigBirdPegasusLearnedPositionalEmbedding( config.max_position_embeddings, embed_dim, ) self.layers = nn.ModuleList([BigBirdPegasusEncoderLayer(config, seed=i) for i in range(config.encoder_layers)]) self.layernorm_embedding = nn.LayerNorm(embed_dim) self.gradient_checkpointing = False # Initialize weights and apply final processing self.post_init() def forward( self, input_ids: Optional[torch.Tensor] = None, attention_mask: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, inputs_embeds: Optional[torch.Tensor] = None, output_attentions: 
Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ): r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. 
""" output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict # retrieve input_ids and inputs_embeds if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") elif input_ids is not None: input_shape = input_ids.size() input_ids = input_ids.view(-1, input_shape[-1]) elif inputs_embeds is not None: input_shape = inputs_embeds.size()[:-1] else: raise ValueError("You have to specify either input_ids or inputs_embeds") if inputs_embeds is None: inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale embed_pos = self.embed_positions(input_shape) hidden_states = inputs_embeds + embed_pos hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) if attention_mask is None: attention_mask = torch.ones(input_shape, device=hidden_states.device) attention_mask = attention_mask.long() # in order to use block_sparse attention, sequence_length has to be at least # bigger than all global attentions: 2 * block_size # + sliding tokens: 3 * block_size # + random tokens: 2 * num_random_blocks * block_size max_tokens_to_attend = (5 + 2 * self.config.num_random_blocks) * self.config.block_size if self.attention_type == "block_sparse" and input_shape[1] <= max_tokens_to_attend: # change attention_type from block_sparse to original_full sequence_length = input_shape[1] logger.warning( "Attention type 'block_sparse' is not possible if sequence_length: " f"{sequence_length} <= num global tokens: 2 * config.block_size " "+ min. 
num sliding tokens: 3 * config.block_size " "+ config.num_random_blocks * config.block_size " "+ additional buffer: config.num_random_blocks * config.block_size " f"= {max_tokens_to_attend} with config.block_size " f"= {self.config.block_size}, config.num_random_blocks " f"= {self.config.num_random_blocks}. " "Changing attention type to 'original_full'..." ) self.set_attention_type("original_full") if self.attention_type == "block_sparse": padding_len, hidden_states, attention_mask = self._pad_to_block_size(hidden_states, attention_mask) else: padding_len = 0 # expand attention_mask if self.attention_type == "original_full": # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype) blocked_encoder_mask = band_mask = from_mask = to_mask = None elif self.attention_type == "block_sparse": blocked_encoder_mask, band_mask, from_mask, to_mask = self.create_masks_for_block_sparse_attn( attention_mask, self.block_size ) attention_mask = None else: raise ValueError( f"attention_type can either be original_full or block_sparse, but is {self.attention_type}" ) encoder_states = () if output_hidden_states else None all_attentions = () if output_attentions else None # check if head_mask has a correct number of layers specified if desired if head_mask is not None: if head_mask.size()[0] != len(self.layers): raise ValueError( f"The head_mask should be specified for {len(self.layers)} layers, but it is for" f" {head_mask.size()[0]}." 
                )

        # Iterate encoder layers, optionally collecting per-layer hidden states / attentions.
        for idx, encoder_layer in enumerate(self.layers):
            if output_hidden_states:
                encoder_states = encoder_states + (hidden_states,)
            # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
            dropout_probability = random.uniform(0, 1)
            if self.training and (dropout_probability < self.layerdrop):  # skip the layer
                layer_outputs = (None, None)
            else:
                if self.gradient_checkpointing and self.training:
                    # Wrap the layer so `output_attentions` is passed positionally —
                    # torch.utils.checkpoint.checkpoint does not forward keyword arguments.
                    def create_custom_forward(module):
                        def custom_forward(*inputs):
                            return module(*inputs, output_attentions)

                        return custom_forward

                    layer_outputs = torch.utils.checkpoint.checkpoint(
                        create_custom_forward(encoder_layer),
                        hidden_states,
                        attention_mask,
                        (head_mask[idx] if head_mask is not None else None),
                        band_mask,
                        from_mask,
                        to_mask,
                        blocked_encoder_mask,
                        blocked_encoder_mask,
                    )
                else:
                    layer_outputs = encoder_layer(
                        hidden_states,
                        attention_mask,
                        layer_head_mask=(head_mask[idx] if head_mask is not None else None),
                        band_mask=band_mask,
                        from_mask=from_mask,
                        to_mask=to_mask,
                        from_blocked_mask=blocked_encoder_mask,
                        to_blocked_mask=blocked_encoder_mask,
                        output_attentions=output_attentions,
                    )

                hidden_states = layer_outputs[0]

            if output_attentions:
                all_attentions = all_attentions + (layer_outputs[1],)

        hidden_states = self.layernorm_embedding(hidden_states)

        if output_hidden_states:
            encoder_states = encoder_states + (hidden_states,)

        if padding_len > 0:
            # unpad `sequence_output` because the calling function is expecting a length == input_ids.size(1)
            hidden_states = hidden_states[:, :-padding_len]

        if not return_dict:
            return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)

        # NOTE(review): non-upstream instrumentation — stashes the final encoder output on the
        # module instance. It is only executed on the return_dict path (skipped entirely when
        # return_dict=False); confirm this asymmetry is intended.
        self.encoder_o = hidden_states

        return BaseModelOutput(
            last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions
        )

    def set_attention_type(self, value: str):
        # Switch every encoder layer between "original_full" and "block_sparse" attention.
        if value not in ["original_full", "block_sparse"]:
            raise ValueError(
                f"attention_type can only be set to either 'original_full' or 'block_sparse', but is {value}"
            )
        # attention type is already
correctly set if value == self.attention_type: return self.attention_type = value for layer in self.layers: layer.set_attention_type(value) @staticmethod # Copied from transformers.models.big_bird.modeling_big_bird.BigBirdModel.create_masks_for_block_sparse_attn def create_masks_for_block_sparse_attn(attention_mask: torch.Tensor, block_size: int): batch_size, seq_length = attention_mask.size() if seq_length % block_size != 0: raise ValueError( f"Sequence length must be multiple of block size, but sequence length is {seq_length}, while block" f" size is {block_size}." ) def create_band_mask_from_inputs(from_blocked_mask, to_blocked_mask): """ Create 3D attention mask from a 2D tensor mask. Args: from_blocked_mask: 2D Tensor of shape [batch_size, from_seq_length//from_block_size, from_block_size]. to_blocked_mask: int32 Tensor of shape [batch_size, to_seq_length//to_block_size, to_block_size]. Returns: float Tensor of shape [batch_size, 1, from_seq_length//from_block_size-4, from_block_size, 3*to_block_size]. 
""" exp_blocked_to_pad = torch.cat( [to_blocked_mask[:, 1:-3], to_blocked_mask[:, 2:-2], to_blocked_mask[:, 3:-1]], dim=2 ) band_mask = torch.einsum("blq,blk->blqk", from_blocked_mask[:, 2:-2], exp_blocked_to_pad) band_mask.unsqueeze_(1) return band_mask blocked_encoder_mask = attention_mask.view(batch_size, seq_length // block_size, block_size) band_mask = create_band_mask_from_inputs(blocked_encoder_mask, blocked_encoder_mask) from_mask = attention_mask.view(batch_size, 1, seq_length, 1) to_mask = attention_mask.view(batch_size, 1, 1, seq_length) return blocked_encoder_mask, band_mask, from_mask, to_mask def _pad_to_block_size(self, hidden_states: torch.Tensor, attention_mask: torch.Tensor): """A helper function to pad tokens and mask to work with implementation of BigBird block-sparse attention.""" # padding block_size = self.config.block_size batch_size, seq_len = hidden_states.shape[:2] padding_len = (block_size - seq_len % block_size) % block_size if padding_len > 0: logger.info( f"Input ids are automatically padded from {seq_len} to {seq_len + padding_len} to be a multiple of " f"`config.block_size`: {block_size}" ) pad_id = self.config.pad_token_id device = hidden_states.device input_ids_padding = torch.ones((batch_size, padding_len), dtype=torch.long, device=device) * pad_id inputs_embeds_padding = self.embed_tokens(input_ids_padding) hidden_states = torch.cat([hidden_states, inputs_embeds_padding], dim=-2) attention_mask = nn.functional.pad( attention_mask, (0, padding_len), value=0 ) # no attention on the padding tokens return padding_len, hidden_states, attention_mask class BigBirdPegasusDecoder(BigBirdPegasusPreTrainedModel): """ Transformer decoder consisting of *config.decoder_layers* layers. 
Each layer is a [`BigBirdPegasusDecoderLayer`] Args: config: BigBirdPegasusConfig embed_tokens (nn.Embedding): output embedding """ def __init__(self, config: BigBirdPegasusConfig, embed_tokens: Optional[nn.Embedding] = None): super().__init__(config) self.dropout = config.dropout self.layerdrop = config.decoder_layerdrop self.padding_idx = config.pad_token_id self.max_target_positions = config.max_position_embeddings self.embed_scale = math.sqrt(config.d_model) if config.scale_embedding else 1.0 self.embed_tokens = nn.Embedding(config.vocab_size, config.d_model, self.padding_idx) if embed_tokens is not None: self.embed_tokens.weight = embed_tokens.weight self.embed_positions = BigBirdPegasusLearnedPositionalEmbedding( config.max_position_embeddings, config.d_model, ) self.layers = nn.ModuleList([BigBirdPegasusDecoderLayer(config) for _ in range(config.decoder_layers)]) self.layernorm_embedding = nn.LayerNorm(config.d_model) self.gradient_checkpointing = False # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.embed_tokens def set_input_embeddings(self, value): self.embed_tokens = value # Copied from transformers.models.bart.modeling_bart.BartDecoder._prepare_decoder_attention_mask def _prepare_decoder_attention_mask(self, attention_mask, input_shape, inputs_embeds, past_key_values_length): # create causal mask # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] combined_attention_mask = None if input_shape[-1] > 1: combined_attention_mask = _make_causal_mask( input_shape, inputs_embeds.dtype, past_key_values_length=past_key_values_length ).to(inputs_embeds.device) if attention_mask is not None: # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] expanded_attn_mask = _expand_mask(attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1]).to( inputs_embeds.device ) combined_attention_mask = ( expanded_attn_mask if combined_attention_mask is None else expanded_attn_mask + combined_attention_mask ) 
return combined_attention_mask def forward( self, input_ids: Optional[torch.Tensor] = None, attention_mask: Optional[torch.Tensor] = None, encoder_hidden_states: Optional[torch.Tensor] = None, encoder_attention_mask: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, past_key_values: Optional[List[torch.FloatTensor]] = None, inputs_embeds: Optional[torch.Tensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ): r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, encoder_sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. encoder_attention_mask (`torch.LongTensor` of shape `(batch_size, encoder_sequence_length)`, *optional*): Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. 
[What are attention masks?](../glossary#attention-mask) head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the cross-attention modules in decoder to avoid performing cross-attention on hidden heads. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. 
output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) use_cache = use_cache if use_cache is not None else self.config.use_cache return_dict = return_dict if return_dict is not None else self.config.use_return_dict # retrieve input_ids and inputs_embeds if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time") elif input_ids is not None: input_shape = input_ids.size() input_ids = input_ids.view(-1, input_shape[-1]) elif inputs_embeds is not None: input_shape = inputs_embeds.size()[:-1] else: raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds") # past_key_values_length past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 if inputs_embeds is None: inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale attention_mask = self._prepare_decoder_attention_mask( attention_mask, input_shape, inputs_embeds, past_key_values_length ) # expand encoder attention mask if encoder_hidden_states is not None and encoder_attention_mask is not None: # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] encoder_attention_mask = _expand_mask(encoder_attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1]) # embed positions positions = self.embed_positions(input_shape, 
past_key_values_length) positions = positions.to(inputs_embeds.device) hidden_states = inputs_embeds + positions hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) # decoder layers all_hidden_states = () if output_hidden_states else None all_self_attns = () if output_attentions else None all_cross_attentions = () if (output_attentions and encoder_hidden_states is not None) else None next_decoder_cache = () if use_cache else None # check if head_mask/cross_attn_head_mask has a correct number of layers specified if desired for attn_mask, mask_name in zip([head_mask, cross_attn_head_mask], ["head_mask", "cross_attn_head_mask"]): if attn_mask is not None: if attn_mask.size()[0] != len(self.layers): raise ValueError( f"The `{mask_name}` should be specified for {len(self.layers)} layers, but it is for" f" {head_mask.size()[0]}." ) for idx, decoder_layer in enumerate(self.layers): # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) if output_hidden_states: all_hidden_states += (hidden_states,) dropout_probability = random.uniform(0, 1) if self.training and (dropout_probability < self.layerdrop): continue past_key_value = past_key_values[idx] if past_key_values is not None else None if self.gradient_checkpointing and self.training: if use_cache: logger.warning_once( "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
) use_cache = False def create_custom_forward(module): def custom_forward(*inputs): # None for past_key_value return module(*inputs, output_attentions, use_cache) return custom_forward layer_outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(decoder_layer), hidden_states, attention_mask, encoder_hidden_states, encoder_attention_mask, head_mask[idx] if head_mask is not None else None, cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None, None, ) else: layer_outputs = decoder_layer( hidden_states, attention_mask=attention_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_attention_mask, layer_head_mask=(head_mask[idx] if head_mask is not None else None), cross_attn_layer_head_mask=( cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None ), past_key_value=past_key_value, output_attentions=output_attentions, use_cache=use_cache, ) hidden_states = layer_outputs[0] if use_cache: next_decoder_cache += (layer_outputs[3 if output_attentions else 1],) if output_attentions: all_self_attns += (layer_outputs[1],) if encoder_hidden_states is not None: all_cross_attentions += (layer_outputs[2],) hidden_states = self.layernorm_embedding(hidden_states) # add hidden states from the last decoder layer if output_hidden_states: all_hidden_states += (hidden_states,) next_cache = next_decoder_cache if use_cache else None if not return_dict: return tuple( v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_cross_attentions] if v is not None ) return BaseModelOutputWithPastAndCrossAttentions( last_hidden_state=hidden_states, past_key_values=next_cache, hidden_states=all_hidden_states, attentions=all_self_attns, cross_attentions=all_cross_attentions, ) @add_start_docstrings( "The bare BigBirdPegasus Model outputting raw hidden-states without any specific head on top.", BIGBIRD_PEGASUS_START_DOCSTRING, ) class BigBirdPegasusModel(BigBirdPegasusPreTrainedModel): 
_keys_to_ignore_on_load_missing = ["encoder.embed_tokens.weight", "decoder.embed_tokens.weight"] def __init__(self, config: BigBirdPegasusConfig): super().__init__(config) padding_idx, vocab_size = config.pad_token_id, config.vocab_size self.shared = nn.Embedding(vocab_size, config.d_model, padding_idx) self.encoder = BigBirdPegasusEncoder(config, self.shared) self.decoder = BigBirdPegasusDecoder(config, self.shared) # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.shared def set_input_embeddings(self, value): self.shared = value self.encoder.embed_tokens = self.shared self.decoder.embed_tokens = self.shared def get_encoder(self): return self.encoder def get_decoder(self): return self.decoder @add_start_docstrings_to_model_forward(BIGBIRD_PEGASUS_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=Seq2SeqModelOutput, config_class=_CONFIG_FOR_DOC, expected_output=_EXPECTED_OUTPUT_SHAPE, ) # Copied from transformers.models.bart.modeling_bart.BartModel.forward with Bart->BigBirdPegasus def forward( self, input_ids: torch.LongTensor = None, attention_mask: Optional[torch.Tensor] = None, decoder_input_ids: Optional[torch.LongTensor] = None, decoder_attention_mask: Optional[torch.LongTensor] = None, head_mask: Optional[torch.Tensor] = None, decoder_head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, encoder_outputs: Optional[List[torch.FloatTensor]] = None, past_key_values: Optional[List[torch.FloatTensor]] = None, inputs_embeds: Optional[torch.FloatTensor] = None, decoder_inputs_embeds: Optional[torch.FloatTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, Seq2SeqModelOutput]: # different to other models, BigBirdPegasus automatically creates decoder_input_ids from # input_ids if no 
decoder_input_ids are provided if decoder_input_ids is None and decoder_inputs_embeds is None: if input_ids is None: raise ValueError( "If no `decoder_input_ids` or `decoder_inputs_embeds` are " "passed, `input_ids` cannot be `None`. Please pass either " "`input_ids` or `decoder_input_ids` or `decoder_inputs_embeds`." ) decoder_input_ids = shift_tokens_right( input_ids, self.config.pad_token_id, self.config.decoder_start_token_id ) output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) use_cache = use_cache if use_cache is not None else self.config.use_cache return_dict = return_dict if return_dict is not None else self.config.use_return_dict if encoder_outputs is None: encoder_outputs = self.encoder( input_ids=input_ids, attention_mask=attention_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) # If the user passed a tuple for encoder_outputs, we wrap it in a BaseModelOutput when return_dict=True elif return_dict and not isinstance(encoder_outputs, BaseModelOutput): encoder_outputs = BaseModelOutput( last_hidden_state=encoder_outputs[0], hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None, attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None, ) # decoder outputs consists of (dec_features, past_key_value, dec_hidden, dec_attn) decoder_outputs = self.decoder( input_ids=decoder_input_ids, attention_mask=decoder_attention_mask, encoder_hidden_states=encoder_outputs[0], encoder_attention_mask=attention_mask, head_mask=decoder_head_mask, cross_attn_head_mask=cross_attn_head_mask, past_key_values=past_key_values, inputs_embeds=decoder_inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, 
return_dict=return_dict, ) if not return_dict: return decoder_outputs + encoder_outputs return Seq2SeqModelOutput( last_hidden_state=decoder_outputs.last_hidden_state, past_key_values=decoder_outputs.past_key_values, decoder_hidden_states=decoder_outputs.hidden_states, decoder_attentions=decoder_outputs.attentions, cross_attentions=decoder_outputs.cross_attentions, encoder_last_hidden_state=encoder_outputs.last_hidden_state, encoder_hidden_states=encoder_outputs.hidden_states, encoder_attentions=encoder_outputs.attentions, ) @add_start_docstrings( "The BigBirdPegasus Model with a language modeling head. Can be used for summarization.", BIGBIRD_PEGASUS_START_DOCSTRING, ) # Copied from transformers.models.bart.modeling_bart.BartForConditionalGeneration with Bart->BigBirdPegasus, BART->BIGBIRD_PEGASUS class BigBirdPegasusForConditionalGeneration(BigBirdPegasusPreTrainedModel): base_model_prefix = "model" _keys_to_ignore_on_load_missing = [ r"final_logits_bias", r"lm_head.weight", "encoder.embed_tokens.weight", "decoder.embed_tokens.weight", ] def __init__(self, config: BigBirdPegasusConfig): super().__init__(config) self.model = BigBirdPegasusModel(config) self.register_buffer("final_logits_bias", torch.zeros((1, self.model.shared.num_embeddings))) self.lm_head = nn.Linear(config.d_model, self.model.shared.num_embeddings, bias=False) # Initialize weights and apply final processing self.post_init() def get_encoder(self): return self.model.get_encoder() def get_decoder(self): return self.model.get_decoder() def resize_token_embeddings(self, new_num_tokens: int) -> nn.Embedding: new_embeddings = super().resize_token_embeddings(new_num_tokens) self._resize_final_logits_bias(new_num_tokens) return new_embeddings def _resize_final_logits_bias(self, new_num_tokens: int) -> None: old_num_tokens = self.final_logits_bias.shape[-1] if new_num_tokens <= old_num_tokens: new_bias = self.final_logits_bias[:, :new_num_tokens] else: extra_bias = torch.zeros((1, new_num_tokens - 
old_num_tokens), device=self.final_logits_bias.device) new_bias = torch.cat([self.final_logits_bias, extra_bias], dim=1) self.register_buffer("final_logits_bias", new_bias) def get_output_embeddings(self): return self.lm_head def set_output_embeddings(self, new_embeddings): self.lm_head = new_embeddings @add_start_docstrings_to_model_forward(BIGBIRD_PEGASUS_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=Seq2SeqLMOutput, config_class=_CONFIG_FOR_DOC) @add_end_docstrings(BIGBIRD_PEGASUS_GENERATION_EXAMPLE) def forward( self, input_ids: torch.LongTensor = None, attention_mask: Optional[torch.Tensor] = None, decoder_input_ids: Optional[torch.LongTensor] = None, decoder_attention_mask: Optional[torch.LongTensor] = None, head_mask: Optional[torch.Tensor] = None, decoder_head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, encoder_outputs: Optional[List[torch.FloatTensor]] = None, past_key_values: Optional[List[torch.FloatTensor]] = None, inputs_embeds: Optional[torch.FloatTensor] = None, decoder_inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, Seq2SeqLMOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. 
Returns: """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict if labels is not None: if use_cache: logger.warning("The `use_cache` argument is changed to `False` since `labels` is provided.") use_cache = False if decoder_input_ids is None and decoder_inputs_embeds is None: decoder_input_ids = shift_tokens_right( labels, self.config.pad_token_id, self.config.decoder_start_token_id ) outputs = self.model( input_ids, attention_mask=attention_mask, decoder_input_ids=decoder_input_ids, encoder_outputs=encoder_outputs, decoder_attention_mask=decoder_attention_mask, head_mask=head_mask, decoder_head_mask=decoder_head_mask, cross_attn_head_mask=cross_attn_head_mask, past_key_values=past_key_values, inputs_embeds=inputs_embeds, decoder_inputs_embeds=decoder_inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) lm_logits = self.lm_head(outputs[0]) lm_logits = lm_logits + self.final_logits_bias.to(lm_logits.device) masked_lm_loss = None if labels is not None: loss_fct = CrossEntropyLoss() masked_lm_loss = loss_fct(lm_logits.view(-1, self.config.vocab_size), labels.view(-1)) if not return_dict: output = (lm_logits,) + outputs[1:] return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output return Seq2SeqLMOutput( loss=masked_lm_loss, logits=lm_logits, past_key_values=outputs.past_key_values, decoder_hidden_states=outputs.decoder_hidden_states, decoder_attentions=outputs.decoder_attentions, cross_attentions=outputs.cross_attentions, encoder_last_hidden_state=outputs.encoder_last_hidden_state, encoder_hidden_states=outputs.encoder_hidden_states, encoder_attentions=outputs.encoder_attentions, ) def prepare_inputs_for_generation( self, decoder_input_ids, past_key_values=None, attention_mask=None, decoder_attention_mask=None, head_mask=None, decoder_head_mask=None, cross_attn_head_mask=None, use_cache=None, encoder_outputs=None, **kwargs, ): 
# cut decoder_input_ids if past_key_values is used if past_key_values is not None: decoder_input_ids = decoder_input_ids[:, -1:] return { "input_ids": None, # encoder_outputs is defined. input_ids not needed "encoder_outputs": encoder_outputs, "past_key_values": past_key_values, "decoder_input_ids": decoder_input_ids, "attention_mask": attention_mask, "decoder_attention_mask": decoder_attention_mask, "head_mask": head_mask, "decoder_head_mask": decoder_head_mask, "cross_attn_head_mask": cross_attn_head_mask, "use_cache": use_cache, # change this to avoid caching (presumably for debugging) } def prepare_decoder_input_ids_from_labels(self, labels: torch.Tensor): return shift_tokens_right(labels, self.config.pad_token_id, self.config.decoder_start_token_id) @staticmethod def _reorder_cache(past_key_values, beam_idx): reordered_past = () for layer_past in past_key_values: # cached cross_attention states don't have to be reordered -> they are always the same reordered_past += ( tuple(past_state.index_select(0, beam_idx) for past_state in layer_past[:2]) + layer_past[2:], ) return reordered_past @add_start_docstrings( """ BigBirdPegasus model with a sequence classification/head on top (a linear layer on top of the pooled output) e.g. for GLUE tasks. 
""", BIGBIRD_PEGASUS_START_DOCSTRING, ) class BigBirdPegasusForSequenceClassification(BigBirdPegasusPreTrainedModel): _keys_to_ignore_on_load_missing = ["encoder.embed_tokens.weight", "decoder.embed_tokens.weight"] def __init__(self, config: BigBirdPegasusConfig, **kwargs): super().__init__(config, **kwargs) self.model = BigBirdPegasusModel(config) self.classification_head = BigBirdPegasusClassificationHead( config.d_model, config.d_model, config.num_labels, config.classifier_dropout, ) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(BIGBIRD_PEGASUS_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=Seq2SeqSequenceClassifierOutput, config_class=_CONFIG_FOR_DOC, ) # Copied from transformers.models.bart.modeling_bart.BartForSequenceClassification.forward def forward( self, input_ids: torch.LongTensor = None, attention_mask: Optional[torch.Tensor] = None, decoder_input_ids: Optional[torch.LongTensor] = None, decoder_attention_mask: Optional[torch.LongTensor] = None, head_mask: Optional[torch.Tensor] = None, decoder_head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, encoder_outputs: Optional[List[torch.FloatTensor]] = None, inputs_embeds: Optional[torch.FloatTensor] = None, decoder_inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, Seq2SeqSequenceClassifierOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict if labels is not None: use_cache = False if input_ids is None and inputs_embeds is not None: raise NotImplementedError( f"Passing input embeddings is currently not supported for {self.__class__.__name__}" ) outputs = self.model( input_ids, attention_mask=attention_mask, decoder_input_ids=decoder_input_ids, decoder_attention_mask=decoder_attention_mask, head_mask=head_mask, decoder_head_mask=decoder_head_mask, cross_attn_head_mask=cross_attn_head_mask, encoder_outputs=encoder_outputs, inputs_embeds=inputs_embeds, decoder_inputs_embeds=decoder_inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) hidden_states = outputs[0] # last hidden state eos_mask = input_ids.eq(self.config.eos_token_id).to(hidden_states.device) if len(torch.unique_consecutive(eos_mask.sum(1))) > 1: raise ValueError("All examples must have the same number of <eos> tokens.") sentence_representation = hidden_states[eos_mask, :].view(hidden_states.size(0), -1, hidden_states.size(-1))[ :, -1, : ] logits = self.classification_head(sentence_representation) loss = None if labels is not None: if self.config.problem_type is None: if self.config.num_labels == 1: self.config.problem_type = "regression" elif self.config.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int): self.config.problem_type = "single_label_classification" else: self.config.problem_type = "multi_label_classification" if self.config.problem_type == "regression": loss_fct = MSELoss() if self.config.num_labels == 1: loss = loss_fct(logits.squeeze(), labels.squeeze()) else: loss = loss_fct(logits, labels) elif self.config.problem_type == "single_label_classification": loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.config.num_labels), labels.view(-1)) elif self.config.problem_type == "multi_label_classification": loss_fct 
= BCEWithLogitsLoss() loss = loss_fct(logits, labels) if not return_dict: output = (logits,) + outputs[1:] return ((loss,) + output) if loss is not None else output return Seq2SeqSequenceClassifierOutput( loss=loss, logits=logits, past_key_values=outputs.past_key_values, decoder_hidden_states=outputs.decoder_hidden_states, decoder_attentions=outputs.decoder_attentions, cross_attentions=outputs.cross_attentions, encoder_last_hidden_state=outputs.encoder_last_hidden_state, encoder_hidden_states=outputs.encoder_hidden_states, encoder_attentions=outputs.encoder_attentions, ) @add_start_docstrings( """ BigBirdPegasus Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear layer on top of the hidden-states output to compute `span start logits` and `span end logits`). """, BIGBIRD_PEGASUS_START_DOCSTRING, ) class BigBirdPegasusForQuestionAnswering(BigBirdPegasusPreTrainedModel): _keys_to_ignore_on_load_missing = ["encoder.embed_tokens.weight", "decoder.embed_tokens.weight"] def __init__(self, config): super().__init__(config) config.num_labels = 2 self.num_labels = config.num_labels self.model = BigBirdPegasusModel(config) self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(BIGBIRD_PEGASUS_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=Seq2SeqQuestionAnsweringModelOutput, config_class=_CONFIG_FOR_DOC, ) # Copied from transformers.models.bart.modeling_bart.BartForQuestionAnswering.forward def forward( self, input_ids: torch.Tensor = None, attention_mask: Optional[torch.Tensor] = None, decoder_input_ids: Optional[torch.LongTensor] = None, decoder_attention_mask: Optional[torch.LongTensor] = None, head_mask: Optional[torch.Tensor] = None, decoder_head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, encoder_outputs: 
Optional[List[torch.FloatTensor]] = None, start_positions: Optional[torch.LongTensor] = None, end_positions: Optional[torch.LongTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, decoder_inputs_embeds: Optional[torch.FloatTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, Seq2SeqQuestionAnsweringModelOutput]: r""" start_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the start of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (*sequence_length*). Position outside of the sequence are not taken into account for computing the loss. end_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the end of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (*sequence_length*). Position outside of the sequence are not taken into account for computing the loss. 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict if start_positions is not None and end_positions is not None: use_cache = False outputs = self.model( input_ids, attention_mask=attention_mask, decoder_input_ids=decoder_input_ids, decoder_attention_mask=decoder_attention_mask, head_mask=head_mask, decoder_head_mask=decoder_head_mask, cross_attn_head_mask=cross_attn_head_mask, encoder_outputs=encoder_outputs, inputs_embeds=inputs_embeds, decoder_inputs_embeds=decoder_inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = outputs[0] logits = self.qa_outputs(sequence_output) start_logits, end_logits = logits.split(1, dim=-1) start_logits = start_logits.squeeze(-1).contiguous() end_logits = end_logits.squeeze(-1).contiguous() total_loss = None if start_positions is not None and end_positions is not None: # If we are on multi-GPU, split add a dimension if len(start_positions.size()) > 1: start_positions = start_positions.squeeze(-1) if len(end_positions.size()) > 1: end_positions = end_positions.squeeze(-1) # sometimes the start/end positions are outside our model inputs, we ignore these terms ignored_index = start_logits.size(1) start_positions = start_positions.clamp(0, ignored_index) end_positions = end_positions.clamp(0, ignored_index) loss_fct = CrossEntropyLoss(ignore_index=ignored_index) start_loss = loss_fct(start_logits, start_positions) end_loss = loss_fct(end_logits, end_positions) total_loss = (start_loss + end_loss) / 2 if not return_dict: output = ( start_logits, end_logits, ) + outputs[1:] return ((total_loss,) + output) if total_loss is not None else output return Seq2SeqQuestionAnsweringModelOutput( loss=total_loss, start_logits=start_logits, end_logits=end_logits, past_key_values=outputs.past_key_values, decoder_hidden_states=outputs.decoder_hidden_states, decoder_attentions=outputs.decoder_attentions, 
cross_attentions=outputs.cross_attentions, encoder_last_hidden_state=outputs.encoder_last_hidden_state, encoder_hidden_states=outputs.encoder_hidden_states, encoder_attentions=outputs.encoder_attentions, ) # Copied from transformers.models.pegasus.modeling_pegasus.PegasusDecoderWrapper with Pegasus->BigBirdPegasus class BigBirdPegasusDecoderWrapper(BigBirdPegasusPreTrainedModel): """ This wrapper class is a helper class to correctly load pretrained checkpoints when the causal language model is used in combination with the [`EncoderDecoderModel`] framework. """ def __init__(self, config): super().__init__(config) self.decoder = BigBirdPegasusDecoder(config) def forward(self, *args, **kwargs): return self.decoder(*args, **kwargs) class BigBirdPegasusForCausalLM(BigBirdPegasusPreTrainedModel): _keys_to_ignore_on_load_missing = ["lm_head.weight"] def __init__(self, config): config = copy.deepcopy(config) config.is_decoder = True config.is_encoder_decoder = False super().__init__(config) self.model = BigBirdPegasusDecoderWrapper(config) self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.model.decoder.embed_tokens def set_input_embeddings(self, value): self.model.decoder.embed_tokens = value def get_output_embeddings(self): return self.lm_head def set_output_embeddings(self, new_embeddings): self.lm_head = new_embeddings def set_decoder(self, decoder): self.model.decoder = decoder def get_decoder(self): return self.model.decoder @replace_return_docstrings(output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC) def forward( self, input_ids: torch.LongTensor = None, attention_mask: Optional[torch.Tensor] = None, encoder_hidden_states: Optional[torch.FloatTensor] = None, encoder_attention_mask: Optional[torch.FloatTensor] = None, head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = 
None, past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None, inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, CausalLMOutputWithCrossAttentions]: r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if the model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`: head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. 
cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. The two additional tensors are only required when the model is used as a decoder in a Sequence to Sequence model. Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. use_cache (`bool`, *optional*): If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see `past_key_values`). - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. 
output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. Returns: Example: ```python >>> from transformers import AutoTokenizer, BigBirdPegasusForCausalLM >>> tokenizer = AutoTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv") >>> model = BigBirdPegasusForCausalLM.from_pretrained( ... "google/bigbird-pegasus-large-arxiv", add_cross_attention=False ... ) >>> assert model.config.is_decoder, f"{model.__class__} has to be configured as a decoder." >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt") >>> outputs = model(**inputs) >>> logits = outputs.logits ```""" output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) outputs = self.model.decoder( input_ids=input_ids, attention_mask=attention_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_attention_mask, head_mask=head_mask, cross_attn_head_mask=cross_attn_head_mask, past_key_values=past_key_values, inputs_embeds=inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) logits = self.lm_head(outputs[0]) loss = None if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.config.vocab_size), labels.view(-1)) if not return_dict: output 
= (logits,) + outputs[1:] return (loss,) + output if loss is not None else output return CausalLMOutputWithCrossAttentions( loss=loss, logits=logits, past_key_values=outputs.past_key_values, hidden_states=outputs.hidden_states, attentions=outputs.attentions, cross_attentions=outputs.cross_attentions, ) def prepare_inputs_for_generation( self, input_ids, past_key_values=None, attention_mask=None, use_cache=None, **kwargs ): # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly if attention_mask is None: attention_mask = input_ids.new_ones(input_ids.shape) if past_key_values: input_ids = input_ids[:, -1:] # first step, decoder_cached_states are empty return { "input_ids": input_ids, # encoder_outputs is defined. input_ids not needed "attention_mask": attention_mask, "past_key_values": past_key_values, "use_cache": use_cache, } @staticmethod def _reorder_cache(past_key_values, beam_idx): reordered_past = () for layer_past in past_key_values: reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),) return reordered_past
27182812/ChatGLM-LLaMA-chinese-insturct
6,294
src/transformers/models/bigbird_pegasus/convert_bigbird_pegasus_tf_to_pytorch.py
# coding=utf-8 # Copyright 2021 The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse from typing import Dict import tensorflow as tf import torch from tqdm import tqdm from transformers import BigBirdPegasusConfig, BigBirdPegasusForConditionalGeneration INIT_COMMON = [ # tf -> hf ("/", "."), ("layer_", "layers."), ("kernel", "weight"), ("beta", "bias"), ("gamma", "weight"), ("pegasus", "model"), ] END_COMMON = [ (".output.dense", ".fc2"), ("intermediate.LayerNorm", "final_layer_norm"), ("intermediate.dense", "fc1"), ] DECODER_PATTERNS = ( INIT_COMMON + [ ("attention.self.LayerNorm", "self_attn_layer_norm"), ("attention.output.dense", "self_attn.out_proj"), ("attention.self", "self_attn"), ("attention.encdec.LayerNorm", "encoder_attn_layer_norm"), ("attention.encdec_output.dense", "encoder_attn.out_proj"), ("attention.encdec", "encoder_attn"), ("key", "k_proj"), ("value", "v_proj"), ("query", "q_proj"), ("decoder.LayerNorm", "decoder.layernorm_embedding"), ] + END_COMMON ) REMAINING_PATTERNS = ( INIT_COMMON + [ ("embeddings.word_embeddings", "shared.weight"), ("embeddings.position_embeddings", "embed_positions.weight"), ("attention.self.LayerNorm", "self_attn_layer_norm"), ("attention.output.dense", "self_attn.output"), ("attention.self", "self_attn.self"), ("encoder.LayerNorm", "encoder.layernorm_embedding"), ] + END_COMMON ) KEYS_TO_IGNORE = [ "encdec/key/bias", "encdec/query/bias", "encdec/value/bias", "self/key/bias", "self/query/bias", 
"self/value/bias", "encdec_output/dense/bias", "attention/output/dense/bias", ] def rename_state_dict_key(k, patterns): for tf_name, hf_name in patterns: k = k.replace(tf_name, hf_name) return k def convert_bigbird_pegasus(tf_weights: dict, config_update: dict) -> BigBirdPegasusForConditionalGeneration: cfg = BigBirdPegasusConfig(**config_update) torch_model = BigBirdPegasusForConditionalGeneration(cfg) state_dict = torch_model.state_dict() mapping = {} # separating decoder weights decoder_weights = {k: tf_weights[k] for k in tf_weights if k.startswith("pegasus/decoder")} remaining_weights = {k: tf_weights[k] for k in tf_weights if not k.startswith("pegasus/decoder")} for k, v in tqdm(decoder_weights.items(), "tf -> hf conversion"): conditions = [k.endswith(ending) for ending in KEYS_TO_IGNORE] if any(conditions): continue patterns = DECODER_PATTERNS new_k = rename_state_dict_key(k, patterns) if new_k not in state_dict: raise ValueError(f"could not find new key {new_k} in state dict. (converted from {k})") if any([True if i in k else False for i in ["dense", "query", "key", "value"]]): v = v.T mapping[new_k] = torch.from_numpy(v) assert v.shape == state_dict[new_k].shape, f"{new_k}, {k}, {v.shape}, {state_dict[new_k].shape}" for k, v in tqdm(remaining_weights.items(), "tf -> hf conversion"): conditions = [k.endswith(ending) for ending in KEYS_TO_IGNORE] if any(conditions): continue patterns = REMAINING_PATTERNS new_k = rename_state_dict_key(k, patterns) if new_k not in state_dict and k != "pegasus/embeddings/position_embeddings": raise ValueError(f"could not find new key {new_k} in state dict. 
(converted from {k})") if any([True if i in k else False for i in ["dense", "query", "key", "value"]]): v = v.T mapping[new_k] = torch.from_numpy(v) if k != "pegasus/embeddings/position_embeddings": assert v.shape == state_dict[new_k].shape, f"{new_k}, {k}, {v.shape}, {state_dict[new_k].shape}" mapping["model.encoder.embed_positions.weight"] = mapping["model.embed_positions.weight"] mapping["model.decoder.embed_positions.weight"] = mapping.pop("model.embed_positions.weight") missing, extra = torch_model.load_state_dict(mapping, strict=False) unexpected_missing = [ k for k in missing if k not in [ "final_logits_bias", "model.encoder.embed_tokens.weight", "model.decoder.embed_tokens.weight", "lm_head.weight", ] ] assert unexpected_missing == [], f"no matches found for the following torch keys {unexpected_missing}" assert extra == [], f"no matches found for the following tf keys {extra}" return torch_model def get_tf_weights_as_numpy(path) -> Dict: init_vars = tf.train.list_variables(path) tf_weights = {} ignore_name = ["global_step"] for name, shape in tqdm(init_vars, desc="converting tf checkpoint to dict"): skip_key = any([pat in name for pat in ignore_name]) if skip_key: continue array = tf.train.load_variable(path, name) tf_weights[name] = array return tf_weights def convert_bigbird_pegasus_ckpt_to_pytorch(ckpt_path: str, save_dir: str, config_update: dict): tf_weights = get_tf_weights_as_numpy(ckpt_path) torch_model = convert_bigbird_pegasus(tf_weights, config_update) torch_model.save_pretrained(save_dir) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--tf_ckpt_path", type=str, help="passed to tf.train.list_variables") parser.add_argument("--save_dir", default=None, type=str, help="Path to the output PyTorch model.") args = parser.parse_args() config_update = {} convert_bigbird_pegasus_ckpt_to_pytorch(args.tf_ckpt_path, args.save_dir, config_update=config_update)
27182812/ChatGLM-LLaMA-chinese-insturct
19,802
src/transformers/models/bigbird_pegasus/configuration_bigbird_pegasus.py
# coding=utf-8 # Copyright Google Research and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ BigBirdPegasus model configuration""" from collections import OrderedDict from typing import Any, Mapping, Optional from ... import PreTrainedTokenizer from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig, OnnxConfigWithPast, OnnxSeq2SeqConfigWithPast from ...onnx.utils import compute_effective_axis_dimension from ...utils import TensorType, is_torch_available, logging logger = logging.get_logger(__name__) BIGBIRD_PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP = { "google/bigbird-pegasus-large-arxiv": ( "https://huggingface.co/google/bigbird-pegasus-large-arxiv/resolve/main/config.json" ), "google/bigbird-pegasus-large-pubmed": ( "https://huggingface.co/google/bigbird-pegasus-large-pubmed/resolve/main/config.json" ), "google/bigbird-pegasus-large-bigpatent": ( "https://huggingface.co/google/bigbird-pegasus-large-bigpatent/resolve/main/config.json" ), # See all BigBirdPegasus models at https://huggingface.co/models?filter=bigbird_pegasus } class BigBirdPegasusConfig(PretrainedConfig): r""" This is the configuration class to store the configuration of a [`BigBirdPegasusModel`]. It is used to instantiate an BigBirdPegasus model according to the specified arguments, defining the model architecture. 
Instantiating a configuration with the defaults will yield a similar configuration to that of the BigBirdPegasus [google/bigbird-pegasus-large-arxiv](https://huggingface.co/google/bigbird-pegasus-large-arxiv) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Args: vocab_size (`int`, *optional*, defaults to 96103): Vocabulary size of the BigBirdPegasus model. Defines the number of different tokens that can be represented by the `inputs_ids` passed when calling [`BigBirdPegasusModel`]. d_model (`int`, *optional*, defaults to 1024): Dimension of the layers and the pooler layer. encoder_layers (`int`, *optional*, defaults to 16): Number of encoder layers. decoder_layers (`int`, *optional*, defaults to 16): Number of decoder layers. encoder_attention_heads (`int`, *optional*, defaults to 16): Number of attention heads for each attention layer in the Transformer encoder. decoder_attention_heads (`int`, *optional*, defaults to 16): Number of attention heads for each attention layer in the Transformer decoder. decoder_ffn_dim (`int`, *optional*, defaults to 4096): Dimension of the "intermediate" (often named feed-forward) layer in decoder. encoder_ffn_dim (`int`, *optional*, defaults to 4096): Dimension of the "intermediate" (often named feed-forward) layer in decoder. activation_function (`str` or `function`, *optional*, defaults to `"gelu_new"`): The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`, `"relu"`, `"silu"` and `"gelu_new"` are supported. dropout (`float`, *optional*, defaults to 0.1): The dropout probability for all fully connected layers in the embeddings, encoder, and pooler. attention_dropout (`float`, *optional*, defaults to 0.0): The dropout ratio for the attention probabilities. 
activation_dropout (`float`, *optional*, defaults to 0.0): The dropout ratio for activations inside the fully connected layer. classifier_dropout (`float`, *optional*, defaults to 0.0): The dropout ratio for classifier. max_position_embeddings (`int`, *optional*, defaults to 4096): The maximum sequence length that this model might ever be used with. Typically set this to something large just in case (e.g., 1024 or 2048 or 4096). init_std (`float`, *optional*, defaults to 0.02): The standard deviation of the truncated_normal_initializer for initializing all weight matrices. encoder_layerdrop (`float`, *optional*, defaults to 0.0): The LayerDrop probability for the encoder. See the [LayerDrop paper](see https://arxiv.org/abs/1909.11556) for more details. decoder_layerdrop (`float`, *optional*, defaults to 0.0): The LayerDrop probability for the decoder. See the [LayerDrop paper](see https://arxiv.org/abs/1909.11556) for more details. use_cache (`bool`, *optional*, defaults to `True`): Whether or not the model should return the last key/values attentions (not used by all models). attention_type (`str`, *optional*, defaults to `"block_sparse"`) Whether to use block sparse attention (with n complexity) as introduced in paper or original attention layer (with n^2 complexity) in encoder. Possible values are `"original_full"` and `"block_sparse"`. use_bias (`bool`, *optional*, defaults to `False`) Whether to use bias in query, key, value. block_size (`int`, *optional*, defaults to 64) Size of each block. Useful only when `attention_type == "block_sparse"`. num_random_blocks (`int`, *optional*, defaults to 3) Each query is going to attend these many number of random blocks. Useful only when `attention_type == "block_sparse"`. scale_embeddings (`bool`, *optional*, defaults to `True`) Whether to rescale embeddings with (hidden_size ** 0.5). 
Example: ```python >>> from transformers import BigBirdPegasusConfig, BigBirdPegasusModel >>> # Initializing a BigBirdPegasus bigbird-pegasus-base style configuration >>> configuration = BigBirdPegasusConfig() >>> # Initializing a model (with random weights) from the bigbird-pegasus-base style configuration >>> model = BigBirdPegasusModel(configuration) >>> # Accessing the model configuration >>> configuration = model.config ```""" model_type = "bigbird_pegasus" keys_to_ignore_at_inference = ["past_key_values"] attribute_map = { "num_attention_heads": "encoder_attention_heads", "hidden_size": "d_model", "attention_probs_dropout_prob": "attention_dropout", } def __init__( self, vocab_size=96103, max_position_embeddings=4096, encoder_layers=16, encoder_ffn_dim=4096, encoder_attention_heads=16, decoder_layers=16, decoder_ffn_dim=4096, decoder_attention_heads=16, encoder_layerdrop=0.0, decoder_layerdrop=0.0, use_cache=True, is_encoder_decoder=True, activation_function="gelu_new", d_model=1024, dropout=0.1, attention_dropout=0.0, activation_dropout=0.0, init_std=0.02, decoder_start_token_id=2, classifier_dropout=0.0, scale_embedding=True, pad_token_id=0, bos_token_id=2, eos_token_id=1, attention_type="block_sparse", # only for encoder block_size=64, num_random_blocks=3, use_bias=False, **kwargs, ): self.vocab_size = vocab_size self.max_position_embeddings = max_position_embeddings self.d_model = d_model self.encoder_ffn_dim = encoder_ffn_dim self.encoder_layers = encoder_layers self.encoder_attention_heads = encoder_attention_heads self.decoder_ffn_dim = decoder_ffn_dim self.decoder_layers = decoder_layers self.decoder_attention_heads = decoder_attention_heads self.dropout = dropout self.attention_dropout = attention_dropout self.activation_dropout = activation_dropout self.activation_function = activation_function self.init_std = init_std self.encoder_layerdrop = encoder_layerdrop self.decoder_layerdrop = decoder_layerdrop self.classifier_dropout = classifier_dropout 
self.use_cache = use_cache self.num_hidden_layers = encoder_layers self.scale_embedding = scale_embedding # scale factor will be sqrt(d_model) if True # extra config self.attention_type = attention_type self.block_size = block_size self.num_random_blocks = num_random_blocks self.use_bias = use_bias super().__init__( pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id, is_encoder_decoder=is_encoder_decoder, decoder_start_token_id=decoder_start_token_id, **kwargs, ) # Copied from transformers.models.bart.configuration_bart.BartOnnxConfig class BigBirdPegasusOnnxConfig(OnnxSeq2SeqConfigWithPast): @property def inputs(self) -> Mapping[str, Mapping[int, str]]: if self.task in ["default", "seq2seq-lm"]: common_inputs = OrderedDict( [ ("input_ids", {0: "batch", 1: "encoder_sequence"}), ("attention_mask", {0: "batch", 1: "encoder_sequence"}), ] ) if self.use_past: common_inputs["decoder_input_ids"] = {0: "batch"} common_inputs["decoder_attention_mask"] = {0: "batch", 1: "past_decoder_sequence + sequence"} else: common_inputs["decoder_input_ids"] = {0: "batch", 1: "decoder_sequence"} common_inputs["decoder_attention_mask"] = {0: "batch", 1: "decoder_sequence"} if self.use_past: self.fill_with_past_key_values_(common_inputs, direction="inputs") elif self.task == "causal-lm": # TODO: figure this case out. 
common_inputs = OrderedDict( [ ("input_ids", {0: "batch", 1: "encoder_sequence"}), ("attention_mask", {0: "batch", 1: "encoder_sequence"}), ] ) if self.use_past: num_encoder_layers, _ = self.num_layers for i in range(num_encoder_layers): common_inputs[f"past_key_values.{i}.key"] = {0: "batch", 2: "past_sequence + sequence"} common_inputs[f"past_key_values.{i}.value"] = {0: "batch", 2: "past_sequence + sequence"} else: common_inputs = OrderedDict( [ ("input_ids", {0: "batch", 1: "encoder_sequence"}), ("attention_mask", {0: "batch", 1: "encoder_sequence"}), ("decoder_input_ids", {0: "batch", 1: "decoder_sequence"}), ("decoder_attention_mask", {0: "batch", 1: "decoder_sequence"}), ] ) return common_inputs @property def outputs(self) -> Mapping[str, Mapping[int, str]]: if self.task in ["default", "seq2seq-lm"]: common_outputs = super().outputs else: common_outputs = super(OnnxConfigWithPast, self).outputs if self.use_past: num_encoder_layers, _ = self.num_layers for i in range(num_encoder_layers): common_outputs[f"present.{i}.key"] = {0: "batch", 2: "past_sequence + sequence"} common_outputs[f"present.{i}.value"] = {0: "batch", 2: "past_sequence + sequence"} return common_outputs def _generate_dummy_inputs_for_default_and_seq2seq_lm( self, tokenizer: PreTrainedTokenizer, batch_size: int = -1, seq_length: int = -1, is_pair: bool = False, framework: Optional[TensorType] = None, ) -> Mapping[str, Any]: encoder_inputs = self._generate_dummy_inputs_for_sequence_classification_and_question_answering( tokenizer, batch_size, seq_length, is_pair, framework ) # Generate decoder inputs decoder_seq_length = seq_length if not self.use_past else 1 decoder_inputs = self._generate_dummy_inputs_for_sequence_classification_and_question_answering( tokenizer, batch_size, decoder_seq_length, is_pair, framework ) decoder_inputs = {f"decoder_{name}": tensor for name, tensor in decoder_inputs.items()} common_inputs = dict(**encoder_inputs, **decoder_inputs) if self.use_past: if not 
is_torch_available(): raise ValueError("Cannot generate dummy past_keys inputs without PyTorch installed.") else: import torch batch, encoder_seq_length = common_inputs["input_ids"].shape decoder_seq_length = common_inputs["decoder_input_ids"].shape[1] num_encoder_attention_heads, num_decoder_attention_heads = self.num_attention_heads encoder_shape = ( batch, num_encoder_attention_heads, encoder_seq_length, self._config.hidden_size // num_encoder_attention_heads, ) decoder_past_length = decoder_seq_length + 3 decoder_shape = ( batch, num_decoder_attention_heads, decoder_past_length, self._config.hidden_size // num_decoder_attention_heads, ) common_inputs["decoder_attention_mask"] = torch.cat( [common_inputs["decoder_attention_mask"], torch.ones(batch, decoder_past_length)], dim=1 ) common_inputs["past_key_values"] = [] # If the number of encoder and decoder layers are present in the model configuration, both are considered num_encoder_layers, num_decoder_layers = self.num_layers min_num_layers = min(num_encoder_layers, num_decoder_layers) max_num_layers = max(num_encoder_layers, num_decoder_layers) - min_num_layers remaining_side_name = "encoder" if num_encoder_layers > num_decoder_layers else "decoder" for _ in range(min_num_layers): common_inputs["past_key_values"].append( ( torch.zeros(decoder_shape), torch.zeros(decoder_shape), torch.zeros(encoder_shape), torch.zeros(encoder_shape), ) ) # TODO: test this. 
shape = encoder_shape if remaining_side_name == "encoder" else decoder_shape for _ in range(min_num_layers, max_num_layers): common_inputs["past_key_values"].append((torch.zeros(shape), torch.zeros(shape))) return common_inputs def _generate_dummy_inputs_for_causal_lm( self, tokenizer: PreTrainedTokenizer, batch_size: int = -1, seq_length: int = -1, is_pair: bool = False, framework: Optional[TensorType] = None, ) -> Mapping[str, Any]: common_inputs = self._generate_dummy_inputs_for_sequence_classification_and_question_answering( tokenizer, batch_size, seq_length, is_pair, framework ) if self.use_past: if not is_torch_available(): raise ValueError("Cannot generate dummy past_keys inputs without PyTorch installed.") else: import torch batch, seqlen = common_inputs["input_ids"].shape # Not using the same length for past_key_values past_key_values_length = seqlen + 2 num_encoder_layers, _ = self.num_layers num_encoder_attention_heads, _ = self.num_attention_heads past_shape = ( batch, num_encoder_attention_heads, past_key_values_length, self._config.hidden_size // num_encoder_attention_heads, ) mask_dtype = common_inputs["attention_mask"].dtype common_inputs["attention_mask"] = torch.cat( [common_inputs["attention_mask"], torch.ones(batch, past_key_values_length, dtype=mask_dtype)], dim=1 ) common_inputs["past_key_values"] = [ (torch.zeros(past_shape), torch.zeros(past_shape)) for _ in range(num_encoder_layers) ] return common_inputs def _generate_dummy_inputs_for_sequence_classification_and_question_answering( self, tokenizer: PreTrainedTokenizer, batch_size: int = -1, seq_length: int = -1, is_pair: bool = False, framework: Optional[TensorType] = None, ) -> Mapping[str, Any]: # Copied from OnnxConfig.generate_dummy_inputs # Did not use super(OnnxConfigWithPast, self).generate_dummy_inputs for code clarity. 
# If dynamic axis (-1) we forward with a fixed dimension of 2 samples to avoid optimizations made by ONNX batch_size = compute_effective_axis_dimension( batch_size, fixed_dimension=OnnxConfig.default_fixed_batch, num_token_to_add=0 ) # If dynamic axis (-1) we forward with a fixed dimension of 8 tokens to avoid optimizations made by ONNX token_to_add = tokenizer.num_special_tokens_to_add(is_pair) seq_length = compute_effective_axis_dimension( seq_length, fixed_dimension=OnnxConfig.default_fixed_sequence, num_token_to_add=token_to_add ) # Generate dummy inputs according to compute batch and sequence dummy_input = [" ".join([tokenizer.unk_token]) * seq_length] * batch_size common_inputs = dict(tokenizer(dummy_input, return_tensors=framework)) return common_inputs def generate_dummy_inputs( self, tokenizer: PreTrainedTokenizer, batch_size: int = -1, seq_length: int = -1, is_pair: bool = False, framework: Optional[TensorType] = None, ) -> Mapping[str, Any]: if self.task in ["default", "seq2seq-lm"]: common_inputs = self._generate_dummy_inputs_for_default_and_seq2seq_lm( tokenizer, batch_size=batch_size, seq_length=seq_length, is_pair=is_pair, framework=framework ) elif self.task == "causal-lm": common_inputs = self._generate_dummy_inputs_for_causal_lm( tokenizer, batch_size=batch_size, seq_length=seq_length, is_pair=is_pair, framework=framework ) else: common_inputs = self._generate_dummy_inputs_for_sequence_classification_and_question_answering( tokenizer, batch_size=batch_size, seq_length=seq_length, is_pair=is_pair, framework=framework ) return common_inputs def _flatten_past_key_values_(self, flattened_output, name, idx, t): if self.task in ["default", "seq2seq-lm"]: flattened_output = super()._flatten_past_key_values_(flattened_output, name, idx, t) else: flattened_output = super(OnnxSeq2SeqConfigWithPast, self)._flatten_past_key_values_( flattened_output, name, idx, t )
27182812/ChatGLM-LLaMA-chinese-insturct
36,282
src/transformers/models/megatron_gpt2/checkpoint_reshaping_and_interoperability.py
# Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import os
import re
import sys
import types

import torch

from transformers import AutoTokenizer, GPT2Config
from transformers.modeling_utils import WEIGHTS_INDEX_NAME, WEIGHTS_NAME, shard_checkpoint


def add_checkpointing_args(parser):
    """Register the checkpoint-path and conversion-direction CLI arguments on `parser`."""
    parser.add_argument("--megatron-path", type=str, default=None, help="Base directory of Megatron repository")
    parser.add_argument(
        "--convert_checkpoint_from_megatron_to_transformers",
        action="store_true",
        help=(
            "If True, convert a Megatron checkpoint to a Transformers checkpoint. "
            "If False, convert a Transformers checkpoint to a Megatron checkpoint."
        ),
    )
    parser.add_argument(
        "--load_path",
        type=str,
        required=True,
        help="Path to the checkpoint to convert.",
    )
    parser.add_argument(
        "--save_path",
        type=str,
        required=True,
        help="Path to the converted checkpoint.",
    )
    parser.add_argument("--print-checkpoint-structure", action="store_true")
    return parser


def add_megatron_checkpoint_args(parser):
    """Register the Megatron-specific target-parallelism/dtype CLI arguments on `parser`."""
    parser.add_argument(
        "--target_tensor_model_parallel_size",
        type=int,
        default=1,
        help=(
            "The tensor model parallel size of the converted checkpoint. "
            "Only used when converting a Transformers checkpoint to a Megatron checkpoint."
        ),
    )
    parser.add_argument(
        "--target_pipeline_model_parallel_size",
        type=int,
        default=1,
        help=(
            "The pipeline model parallel size of the converted checkpoint. "
            "Only used when converting a Transformers checkpoint to a Megatron checkpoint."
        ),
    )
    parser.add_argument(
        "--target_data_parallel_size",
        type=int,
        default=1,
        help=(
            "The data parallel size of the converted checkpoint. "
            "Only used when converting a Transformers checkpoint to a Megatron checkpoint."
        ),
    )
    parser.add_argument(
        "--target_params_dtype",
        type=str,
        default="fp32",
        help=(
            "The dtype of the converted checkpoint. "
            "Only used when converting a Transformers checkpoint to a Megatron checkpoint."
        ),
    )
    parser.add_argument(
        "--make_vocab_size_divisible_by",
        type=int,
        default=128,
        help=(
            "Pad the vocab size to be divisible by this value. "
            "This is added for computational efficiency reasons. "
            "Only used when converting a Transformers checkpoint to a Megatron checkpoint."
        ),
    )
    parser.add_argument(
        "--use_distributed_optimizer",
        action="store_true",
        help=(
            "If True, use the distributed optimizer. "
            "Only used when converting a Transformers checkpoint to a Megatron checkpoint."
        ),
    )
    return parser


def add_transformers_checkpoint_args(parser):
    """Register the Transformers-specific tokenizer/sharding CLI arguments on `parser`."""
    parser.add_argument(
        "--tokenizer_name",
        type=str,
        default=None,
        help=(
            "The name of the pre-trained tokenizer to save. "
            "If not None, the tokenizer will be saved. "
            "Only used when converting a Megatron checkpoint to a Transformers checkpoint."
        ),
    )
    parser.add_argument(
        "--max_shard_size",
        type=str,
        default="10GB",
        help=(
            "The maximum size for a checkpoint before being sharded. Checkpoints shard will then be each of size "
            "lower than this size. If expressed as a string, needs to be digits followed by a unit (like `5MB`). "
            "Only used when converting a Megatron checkpoint to a Transformers checkpoint."
        ),
    )
    return parser


# The simple map of names for "automated" rules.
megatron_to_transformers = {
    "attention.dense": ".attn.c_proj.",
    "self_attention.dense": ".attn.c_proj.",
    "mlp.dense_h_to_4h": ".mlp.c_fc.",
    "mlp.dense_4h_to_h": ".mlp.c_proj.",
}
# Inverse map: strip the surrounding dots from the transformers name and swap key/value.
transformers_to_megatron = {v[1:-1]: k for k, v in megatron_to_transformers.items()}

tensor_parallel_params = [
    # megatron-lm layers to merge across tp ranks
    "self_attention.query_key_value.weight",
    "self_attention.query_key_value.bias",
    "self_attention.dense.weight",
    "mlp.dense_h_to_4h.weight",
    "mlp.dense_h_to_4h.bias",
    "mlp.dense_4h_to_h.weight",
    # deprecated
    "attention.query_key_value.weight",
    "attention.query_key_value.bias",
    "attention.dense.weight",
    # transformers layers to split across tp ranks
    "attn.c_attn.weight",
    "attn.c_attn.bias",
    "attn.c_proj.weight",
    "mlp.c_fc.weight",
    "mlp.c_fc.bias",
    "mlp.c_proj.weight",
]


def recursive_print(name, val, spaces=0):
    """
    Recursively print the structure of a checkpoint. This function is taken from `convert_megatron_gpt2_checkpoint.py`

    Args:
        name (str): the name of the current tensor parameter
        val: the value of the current parameter (dict, tensor, or scalar)
        spaces (int): the number of spaces to print before the output for a nested structure
    """
    # Format the message.
    if name is None:
        msg = None
    else:
        fmt = "." * max(0, spaces - 2) + "# {:" + str(50 - spaces) + "s}"
        msg = fmt.format(name)

    # Print and recurse (if needed).
    if isinstance(val, dict):
        if msg is not None:
            print(msg)
        for k in val.keys():
            recursive_print(k, val[k], spaces + 2)
    elif isinstance(val, torch.Tensor):
        print(msg, ":", val.size())
    else:
        print(msg, ":", val)


def megatron_to_transformers_fix_query_key_value_ordering(
    param, checkpoint_version, num_splits, num_heads, hidden_size
):
    """
    Permutes layout of param tensor to [num_splits * num_heads * hidden_size, :] for compatibility with later versions
    of NVIDIA Megatron-LM. The inverse operation is performed inside Megatron-LM to read checkpoints:
    https://github.com/NVIDIA/Megatron-LM/blob/v2.4/megatron/checkpointing.py#L209 If param is the weight tensor of
    the self-attention block, the returned tensor will have to be transposed one more time to be read by HuggingFace
    GPT2. This function is taken from `convert_megatron_gpt2_checkpoint.py`

    Args:
        param (torch.Tensor): the tensor to permute
        checkpoint_version (int): the version of the checkpoint.
        num_splits (int): the number of projections, usually 3 for (Query, Key, Value)
        num_heads (int): the number of attention heads
        hidden_size (int): the hidden size per head
    """
    input_shape = param.size()
    if checkpoint_version == 1.0:
        # version 1.0 stores [num_heads * hidden_size * num_splits, :]
        saved_shape = (num_heads, hidden_size, num_splits) + input_shape[1:]
        param = param.view(*saved_shape)
        param = param.transpose(0, 2)
        param = param.transpose(1, 2).contiguous()
    elif checkpoint_version >= 2.0:
        # other versions store [num_heads * num_splits * hidden_size, :]
        saved_shape = (num_heads, num_splits, hidden_size) + input_shape[1:]
        param = param.view(*saved_shape)
        param = param.transpose(0, 1).contiguous()
    param = param.view(*input_shape)
    return param


def transformers_to_megatron_fix_query_key_value_ordering(
    param, checkpoint_version, num_splits, num_heads, hidden_size
):
    """
    Permutes layout of param tensor to the one compatible with respective NVIDIA Megatron-LM chekpoint versions. Input
    is [num_splits * num_heads * hidden_size, :] and output is [num_heads * hidden_size * num_splits, :] for version
    1.0 and [num_heads * num_splits * hidden_size, :] for version 2.0 and later. If param is the weight tensor of the
    self-attention block, the param needs to be already transposed before calling this function.

    Args:
        param (torch.Tensor): the tensor to permute
        checkpoint_version (int): the version of the checkpoint.
        num_splits (int): the number of projections, usually 3 for (Query, Key, Value)
        num_heads (int): the number of attention heads
        hidden_size (int): the hidden size per head
    """
    # Input is [num_splits * num_heads * hidden_size, :]
    input_shape = param.size()
    if checkpoint_version == 1.0:
        # version 1.0 stores [num_heads * hidden_size * num_splits, :]
        current_shape = (num_splits, num_heads, hidden_size) + input_shape[1:]
        param = param.view(*current_shape)
        param = param.transpose(0, 2)
        param = param.transpose(1, 2).contiguous()
    elif checkpoint_version >= 2.0:
        # other versions store [num_heads * num_splits * hidden_size, :]
        current_shape = (num_splits, num_heads, hidden_size) + input_shape[1:]
        param = param.view(*current_shape)
        param = param.transpose(0, 1).contiguous()
    param = param.view(*input_shape)
    return param


def merge_transformers_sharded_states(path, num_checkpoints):
    """
    Merge sharded checkpoints from transformers into a single checkpoint.

    Args:
        path (str): the path to the sharded checkpoints
        num_checkpoints (int): the number of checkpoints to merge
    """
    state_dict = {}
    for i in range(1, num_checkpoints + 1):
        checkpoint_path = os.path.join(path, f"pytorch_model-{i:05d}-of-{num_checkpoints:05d}.bin")
        current_chunk = torch.load(checkpoint_path, map_location="cpu")
        state_dict.update(current_chunk)
    return state_dict


def get_megatron_sharded_states(args, tp_size, pp_size, pp_rank):
    """
    Get sharded checkpoints from NVIDIA Megatron-LM checkpoint based on the provided tensor parallel size, pipeline
    parallel size and pipeline parallel rank.

    Args:
        args (argparse.Namespace): the arguments to the script
        tp_size (int): the tensor parallel size
        pp_size (int): the pipeline parallel size
        pp_rank (int): the pipeline parallel rank
    """
    tp_state_dicts = []
    for i in range(tp_size):
        sub_dir_name = f"mp_rank_{i:02d}" if pp_size == 1 else f"mp_rank_{i:02d}_{pp_rank:03d}"
        checkpoint_name = os.listdir(os.path.join(args.load_path, sub_dir_name))[0]
        checkpoint_path = os.path.join(args.load_path, sub_dir_name, checkpoint_name)
        state_dict = torch.load(checkpoint_path, map_location="cpu")
        tp_state_dicts.append(state_dict)
    return tp_state_dicts


def get_element_from_dict_by_path(d, path):
    """
    Get element from dictionary by path. If element is not present, recursively add empty dictionaries.

    Args:
        d (dict): the dictionary to get the element from
        path (list): the path to the element which is delimited by "."
    """
    path = path.split(".")
    for k in path:
        if k not in d:
            d[k] = {}
        d = d[k]
    return d


def convert_checkpoint_from_megatron_to_transformers(args):
    """
    Convert NVIDIA Megatron-LM checkpoint to HuggingFace Transformers checkpoint. This handles Megatron checkpoints
    with different tensor parallelism and pipeline parallelism sizes. It saves the converted checkpoint into shards
    using HuggingFace Transformers checkpoint sharding functionality. This greatly extends the functionality of
    `convert_megatron_gpt2_checkpoint.py`

    Args:
        args (argparse.Namespace): the arguments to the script
    """
    # Load Megatron-LM checkpoint arguments from the state dict
    sub_dirs = os.listdir(args.load_path)
    possible_sub_dirs = ["mp_rank_00", "mp_rank_00_000"]
    for sub_dir in possible_sub_dirs:
        if sub_dir in sub_dirs:
            rank0_checkpoint_name = os.listdir(os.path.join(args.load_path, sub_dir))[0]
            rank0_checkpoint_path = os.path.join(args.load_path, sub_dir, rank0_checkpoint_name)
            break
    print(f"Loading Megatron-LM checkpoint arguments from: {rank0_checkpoint_path}")
    state_dict = torch.load(rank0_checkpoint_path, map_location="cpu")
    megatron_args = state_dict.get("args", None)
    if megatron_args is None:
        raise ValueError(
            "Megatron-LM checkpoint does not contain arguments. This utility only supports Megatron-LM checkpoints"
            " containing all the megatron arguments. This is because it loads all config related to model"
            " architecture, the tensor and pipeline model parallel size from the checkpoint insead of user having to"
            " manually specify all the details. Please save Megatron-LM checkpoint along with all the megatron"
            " arguments to use this utility."
        )

    # Create Transformers GPT2 config from Megatron-LM arguments
    if megatron_args is not None:
        if megatron_args.bias_gelu_fusion:
            activation_function = "gelu_fast"
        elif megatron_args.openai_gelu:
            activation_function = "gelu_new"
        else:
            activation_function = "gelu"
    else:
        # in the very early days this used to be "gelu_new"
        activation_function = "gelu_new"
    vocab_size = (
        megatron_args.padded_vocab_size
        if getattr(megatron_args, "orig_vocab_size", None) is None
        else megatron_args.orig_vocab_size
    )
    config = GPT2Config(
        vocab_size=vocab_size,
        n_positions=megatron_args.max_position_embeddings,
        n_embd=megatron_args.hidden_size,
        n_layer=megatron_args.num_layers,
        n_head=megatron_args.num_attention_heads,
        n_inner=megatron_args.ffn_hidden_size,
        activation_function=activation_function,
        resid_pdrop=0.1,
        embd_pdrop=0.1,
        attn_pdrop=0.1,
        layer_norm_epsilon=1e-5,
        initializer_range=0.02,
        summary_type="cls_index",
        summary_use_proj=True,
        summary_activation=None,
        summary_proj_to_labels=True,
        summary_first_dropout=0.1,
        scale_attn_weights=True,
        use_cache=True,
        bos_token_id=vocab_size - 1,
        eos_token_id=vocab_size - 1,
        architectures=["GPT2LMHeadModel"],
    )

    output_state_dict = {}

    checkpoint_version = state_dict.get("checkpoint_version", 0.0)
    tp_size = megatron_args.tensor_model_parallel_size
    pp_size = megatron_args.pipeline_model_parallel_size
    dtype = torch.float32
    # The regex to extract layer names (raw string so the `\.`/`\d` escapes are literal).
    layer_re = re.compile(r"layers\.(\d+)\.([a-z0-9_.]+)\.([a-z]+)")

    # Convert.
    print("Converting")

    # Embeddings
    print("Converting embeddings")
    tp_state_dicts = get_megatron_sharded_states(args, tp_size, pp_size, 0)

    # Convert and store the position embeddings.
    position_embeddings = get_element_from_dict_by_path(
        tp_state_dicts[0], "model.language_model.embedding.position_embeddings.weight"
    )
    output_state_dict["transformer.wpe.weight"] = position_embeddings.to(dtype)

    # Convert and store the word embeddings.
    word_embeddings = torch.cat(
        [
            get_element_from_dict_by_path(
                tp_state_dicts[tp_rank], "model.language_model.embedding.word_embeddings.weight"
            )
            for tp_rank in range(tp_size)
        ],
        dim=0,
    )
    word_embeddings = word_embeddings[:vocab_size].to(dtype)
    output_state_dict["transformer.wte.weight"] = word_embeddings

    # Transformer Layers
    print("Converting transformer layers")
    # The number of heads.
    heads = config.n_head
    # The hidden_size per head.
    hidden_size_per_head = config.n_embd // config.n_head
    n_positions = config.n_positions
    num_layers = config.num_hidden_layers // pp_size

    for pp_rank in range(pp_size):
        if pp_size > 0:
            print(f"Converting pipeline parallel rank {pp_rank}")
            tp_state_dicts = get_megatron_sharded_states(args, tp_size, pp_size, pp_rank)

        # The transformer.
        path = (
            "model.language_model.transformer"
            if "transformer" in get_element_from_dict_by_path(tp_state_dicts[0], "model.language_model").keys()
            else "model.language_model.encoder"
        )

        # Extract the layers.
        for key, val in get_element_from_dict_by_path(tp_state_dicts[0], path).items():
            # Match the name.
            m = layer_re.match(key)
            # Stop if that's not a layer
            if m is None:
                break
            # The index of the layer.
            layer_idx = int(m.group(1)) + pp_rank * num_layers
            # The name of the operation.
            op_name = m.group(2)
            # Is it a weight or a bias?
            weight_or_bias = m.group(3)

            # The name of the layer.
            layer_name = f"transformer.h.{layer_idx}"

            if op_name + "." + weight_or_bias not in tensor_parallel_params:
                params = val.to(dtype)
            else:
                # Column-parallel weights concatenate on dim 0, row-parallel on dim 1.
                dim = 1 if op_name in ["self_attention.dense", "mlp.dense_4h_to_h", "attention.dense"] else 0
                params = torch.cat(
                    [val]
                    + [
                        get_element_from_dict_by_path(tp_state_dicts[tp_rank], f"{path}")[key]
                        for tp_rank in range(1, tp_size)
                    ],
                    dim=dim,
                ).to(dtype)

            # For layernorm(s), simply store the layer norm.
            if op_name.endswith("layernorm"):
                ln_name = "ln_1" if op_name.startswith("input") else "ln_2"
                output_state_dict[layer_name + "." + ln_name + "." + weight_or_bias] = params

            # Transpose the QKV matrix.
            elif (
                op_name == "attention.query_key_value" or op_name == "self_attention.query_key_value"
            ) and weight_or_bias == "weight":
                # Insert a tensor of 1x1xDxD bias.
                causal_mask = torch.tril(torch.ones((n_positions, n_positions), dtype=dtype)).view(
                    1, 1, n_positions, n_positions
                )
                output_state_dict[layer_name + ".attn.bias"] = causal_mask

                # Insert a "dummy" tensor for masked_bias.
                masked_bias = torch.tensor(-1e4, dtype=dtype)
                output_state_dict[layer_name + ".attn.masked_bias"] = masked_bias

                out_val = megatron_to_transformers_fix_query_key_value_ordering(
                    params,
                    checkpoint_version,
                    3,
                    heads,
                    hidden_size_per_head,
                )
                # Megatron stores (3*D) x D but transformers-GPT2 expects D x 3*D.
                out_val = out_val.transpose(0, 1).contiguous()
                # Store.
                output_state_dict[layer_name + ".attn.c_attn.weight"] = out_val

            # Transpose the bias.
            elif (
                op_name == "attention.query_key_value" or op_name == "self_attention.query_key_value"
            ) and weight_or_bias == "bias":
                out_val = megatron_to_transformers_fix_query_key_value_ordering(
                    params, checkpoint_version, 3, heads, hidden_size_per_head
                )
                # Store. No change of shape.
                output_state_dict[layer_name + ".attn.c_attn.bias"] = out_val

            # Transpose the weights.
            elif weight_or_bias == "weight":
                out_name = megatron_to_transformers[op_name]
                output_state_dict[layer_name + out_name + "weight"] = params.transpose(0, 1)

            # Copy the bias.
            elif weight_or_bias == "bias":
                out_name = megatron_to_transformers[op_name]
                output_state_dict[layer_name + out_name + "bias"] = params

    if config.n_layer != (layer_idx + 1):
        raise ValueError(f"Expected {config.n_layer} layers but found {layer_idx + 1}")

    # The final layernorm.
    print("Converting final layernorm")
    params = get_element_from_dict_by_path(tp_state_dicts[0], str(path))
    output_state_dict["transformer.ln_f.weight"] = params["final_layernorm.weight"].to(dtype)
    output_state_dict["transformer.ln_f.bias"] = params["final_layernorm.bias"].to(dtype)

    # For LM head, transformers' wants the matrix to weight embeddings.
print("Converting LM head") output_state_dict["lm_head.weight"] = word_embeddings.to(dtype) # It should be done! print("Conversion from Megatron-LM to Transformers is done!") # Print the structure of converted state dict. if args.print_checkpoint_structure: recursive_print(None, output_state_dict) # Add tokenizer class info to config # see https://github.com/huggingface/transformers/issues/13906) if args.tokenizer_name is None: tokenizer_name = "gpt2" else: tokenizer_name = args.tokenizer_name tokenizer = AutoTokenizer.from_pretrained(tokenizer_name) tokenizer_class = type(tokenizer).__name__ config.tokenizer_class = tokenizer_class # Store the config to file. print("Saving config") config.save_pretrained(args.save_path) # Save tokenizer based on args if args.tokenizer_name is not None: print(f"Adding {tokenizer_class} tokenizer files") tokenizer.save_pretrained(args.save_path) # Store the state_dict to file. max_shard_size = int(args.max_shard_size) if args.max_shard_size.isdigit() else args.max_shard_size shards, index = shard_checkpoint(output_state_dict, max_shard_size=max_shard_size) # Save the model for shard_file, shard in shards.items(): torch.save(shard, os.path.join(args.save_path, shard_file)) if index is None: print(f"Model weights saved in {os.path.join(args.save_path, WEIGHTS_NAME)}") else: save_index_file = os.path.join(args.save_path, WEIGHTS_INDEX_NAME) # Save the index as well with open(save_index_file, "w", encoding="utf-8") as f: content = json.dumps(index, indent=2, sort_keys=True) + "\n" f.write(content) print( f"The model is bigger than the maximum size per checkpoint ({args.max_shard_size}) and is going to be " f"split in {len(shards)} checkpoint shards. You can find where each parameters has been saved in the " f"index located at {save_index_file}." ) def convert_checkpoint_from_transformers_to_megatron(args): """ Convert a checkpoint from HuggingFace Transformers to Megatron-LM. 
This allows converted checkpoints with variable tensor parallelism and pipeline parallelism sizes. It takes as input a checkpoint from HuggingFace Transformers which can have multiple shards. Args: args (argparse.Namespace): the arguments to the script """ os.makedirs(args.save_path, exist_ok=True) # Search in directory above this sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))) if args.megatron_path is not None: sys.path.insert(0, args.megatron_path) try: from megatron.tokenizer.tokenizer import _vocab_size_with_padding except ModuleNotFoundError: print("Unable to import Megatron, please specify the path to Megatron using --megatron-path. Exiting.") exit(1) # load the transformers model state dict and config sub_dirs = [x for x in os.listdir(args.load_path) if x.startswith("pytorch_model")] if len(sub_dirs) == 1: checkpoint_name = "pytorch_model.bin" state_dict = torch.load(os.path.join(args.load_path, checkpoint_name), map_location="cpu") else: num_checkpoints = len(sub_dirs) - 1 state_dict = merge_transformers_sharded_states(args.load_path, num_checkpoints) config = GPT2Config.from_pretrained(args.load_path) # Saving the tracker file tracker_filepath = os.path.join(args.save_path, "latest_checkpointed_iteration.txt") with open(tracker_filepath, "w") as f: f.write("release") # create `release` dir in args.load_path release_dir = os.path.join(args.save_path, "release") os.makedirs(release_dir, exist_ok=True) # megatron args megatron_args = { "orig_vocab_size": config.vocab_size, "max_position_embeddings": config.n_positions, "hidden_size": config.n_embd, "num_layers": config.n_layer, "num_attention_heads": config.n_head, "ffn_hidden_size": config.n_inner, "tensor_model_parallel_size": args.target_tensor_model_parallel_size, "pipeline_model_parallel_size": args.target_pipeline_model_parallel_size, "data_parallel_size": args.target_data_parallel_size, "make_vocab_size_divisible_by": args.make_vocab_size_divisible_by, "rank": 
0, "tokenizer_type": "GPT2BPETokenizer", } if config.activation_function == "gelu": megatron_args["bias_gelu_fusion"] = False megatron_args["openai_gelu"] = False elif config.activation_function == "gelu_fast": megatron_args["bias_gelu_fusion"] = True megatron_args["openai_gelu"] = False elif config.activation_function == "gelu_new": megatron_args["bias_gelu_fusion"] = False megatron_args["openai_gelu"] = True margs = types.SimpleNamespace() for k, v in megatron_args.items(): setattr(margs, k, v) # params dtype if args.target_params_dtype == "fp16": dtype = torch.float16 elif args.target_params_dtype == "bf16": dtype = torch.bfloat16 else: dtype = torch.float32 setattr(margs, "params_dtype", dtype) # save dummy optim state dict dummy_optim_state_dict = {} dummy_optim_state_dict["optimizer"] = { "step": 0, "param_groups": [ { "lr": 0.0, "beta1": 0.0, "beta2": 0.0, "eps": 0.0, "weight_decay": 0.0, "correct_bias": False, "params": [], } ], } if args.use_distributed_optimizer: for i in range(args.target_pipeline_model_parallel_size): for j in range(args.target_tensor_model_parallel_size): for k in range(args.target_data_parallel_size): if args.target_pipeline_model_parallel_size == 1: checkpoint_dir = f"mp_rank_{i:02d}_{k:03d}" else: checkpoint_dir = f"mp_rank_{i:02d}_{j:03d}_{k:03d}" checkpoint_dir = os.path.join(release_dir, checkpoint_dir) os.makedirs(checkpoint_dir, exist_ok=True) torch.save( dummy_optim_state_dict, os.path.join(checkpoint_dir, "optim.pt"), ) # Convert. 
print("Converting") output_state_dict = [] for i in range(args.target_tensor_model_parallel_size): output_state_dict.append({}) # Embedding layer print("converting embedding layer") pos_embedding = state_dict["transformer.wpe.weight"].to(dtype) word_embedding = state_dict["transformer.wte.weight"].to(dtype) orig_vocab_size = config.vocab_size padded_vocab_size = _vocab_size_with_padding(orig_vocab_size, margs) setattr(margs, "padded_vocab_size", padded_vocab_size) # Cut out extra padding we don't need if orig_vocab_size > padded_vocab_size: full_word_embed = word_embedding[0:padded_vocab_size, :] # Expanding embedding to larger size by replicating final entry elif orig_vocab_size < padded_vocab_size: padding_size = padded_vocab_size - orig_vocab_size full_word_embed = torch.cat((word_embedding, word_embedding[-1].unsqueeze(0).expand(padding_size, -1))) # Same size! else: full_word_embed = word_embedding # Split into new tensor model parallel sizes out_word_embed = torch.chunk(full_word_embed, args.target_tensor_model_parallel_size, dim=0) for i in range(args.target_tensor_model_parallel_size): pos_emb_dict = get_element_from_dict_by_path( output_state_dict[i], "model.language_model.embedding.position_embeddings" ) pos_emb_dict["weight"] = pos_embedding word_emb_dict = get_element_from_dict_by_path( output_state_dict[i], "model.language_model.embedding.word_embeddings" ) word_emb_dict["weight"] = out_word_embed[i] # Transformer layers print("converting transformer layers") if config.num_hidden_layers % args.target_tensor_model_parallel_size != 0: raise ValueError( f"Number of layers ({config.num_hidden_layers}) must be divisible by number of tensor parallelism" f" ({args.target_tensor_model_parallel_size})" ) num_layers = config.num_hidden_layers // args.target_pipeline_model_parallel_size layer_re = re.compile("transformer.h\.(\d+)\.([a-z0-9_.]+)\.([a-z]+)") # The number of heads. heads = config.n_head # The hidden_size per head. 
hidden_size_per_head = config.n_embd // config.n_head for pp_rank in range(args.target_pipeline_model_parallel_size): layer_offset = pp_rank * num_layers if pp_rank > 0: output_state_dict = [] for i in range(args.target_tensor_model_parallel_size): output_state_dict.append({}) for layer in range(num_layers): pp_layer_id = layer + layer_offset layers_to_copy = [ layer_name for layer_name in state_dict.keys() if layer_name.startswith(f"transformer.h.{pp_layer_id}.") ] for layer_name in layers_to_copy: m = layer_re.match(layer_name) # Stop if that's not a layer if m is None: break # The index of the layer. _ = int(m.group(1)) # The name of the operation. op_name = m.group(2) # Is it a weight or a bias? weight_or_bias = m.group(3) params = state_dict[layer_name].to(dtype) # handle layernorm if op_name.startswith("ln"): out_name = "input_layernorm" if op_name.endswith("1") else "post_attention_layernorm" layer_name = f"layers.{layer}.{out_name}.{weight_or_bias}" # handle attention K, V, Q weights elif op_name.startswith("attn.c_attn") and weight_or_bias == "weight": # transformers stores D X (3*D) but Megatron-LM expects (3*D) X D. 
params = params.transpose(0, 1).contiguous() params = transformers_to_megatron_fix_query_key_value_ordering( params, 3.0, 3, heads, hidden_size_per_head, ) layer_name = f"layers.{layer}.self_attention.query_key_value.{weight_or_bias}" # handle attention K, V, Q bias elif op_name.startswith("attn.c_attn") and weight_or_bias == "bias": params = transformers_to_megatron_fix_query_key_value_ordering( params, 3.0, 3, heads, hidden_size_per_head, ) layer_name = f"layers.{layer}.self_attention.query_key_value.{weight_or_bias}" # handle attention and mlp weights elif weight_or_bias == "weight": out_name = transformers_to_megatron.get(op_name, None) if out_name is None: continue params = params.transpose(0, 1) layer_name = f"layers.{layer}.{out_name}.{weight_or_bias}" # handle attention and mlp bias elif weight_or_bias == "bias": out_name = transformers_to_megatron.get(op_name, None) if out_name is None: continue layer_name = f"layers.{layer}.{out_name}.{weight_or_bias}" # skip else: continue if op_name + "." + weight_or_bias in tensor_parallel_params: dim = 1 if op_name in ["attn.c_proj", "mlp.c_proj"] else 0 params = torch.chunk(params, args.target_tensor_model_parallel_size, dim=dim) for i in range(args.target_tensor_model_parallel_size): params_dict = get_element_from_dict_by_path(output_state_dict[i], "model.language_model.encoder") params_dict[layer_name] = ( params[i] if (op_name + "." 
+ weight_or_bias in tensor_parallel_params) else params ) if pp_rank == args.target_pipeline_model_parallel_size - 1: # handle final layernorm for weight_or_bias in ["weight", "bias"]: params = state_dict[f"transformer.ln_f.{weight_or_bias}"].to(dtype) layer_name = f"final_layernorm.{weight_or_bias}" for i in range(args.target_tensor_model_parallel_size): params_dict = get_element_from_dict_by_path(output_state_dict[i], "model.language_model.encoder") params_dict[layer_name] = params # add the LM head for i in range(args.target_tensor_model_parallel_size): params_dict = get_element_from_dict_by_path(output_state_dict[i], "model.word_embeddings_for_head") params_dict["weight"] = out_word_embed[i] # saving the state dict as per the tp_rank and pp_rank for tp_rank in range(args.target_tensor_model_parallel_size): output_state_dict[tp_rank]["checkpoint_version"] = 3.0 output_state_dict[tp_rank]["args"] = margs checkpoint_dir = ( f"mp_rank_{tp_rank:02d}" if args.target_pipeline_model_parallel_size == 1 else f"mp_rank_{tp_rank:02d}_{pp_rank:03d}" ) if args.use_distributed_optimizer: checkpoint_name = "model_rng.pt" else: checkpoint_name = "model_optim_rng.pt" output_state_dict[tp_rank]["optimizer"] = dummy_optim_state_dict["optimizer"] checkpoint_dir = os.path.join(release_dir, checkpoint_dir) os.makedirs(checkpoint_dir, exist_ok=True) checkpoint_path = os.path.join(checkpoint_dir, checkpoint_name) if args.print_checkpoint_structure: print( f"Checkpoint structure of model state dict shard belonging to TP rank {tp_rank} and PP rank" f" {pp_rank}:" ) recursive_print(None, output_state_dict[tp_rank]) torch.save(output_state_dict[tp_rank], checkpoint_path) def main(): parser = argparse.ArgumentParser() parser = add_checkpointing_args(parser) parser = add_megatron_checkpoint_args(parser) parser = add_transformers_checkpoint_args(parser) args = parser.parse_args() if args.convert_checkpoint_from_megatron_to_transformers: convert_checkpoint_from_megatron_to_transformers(args) 
else: convert_checkpoint_from_transformers_to_megatron(args) if __name__ == "__main__": main()
27182812/ChatGLM-LLaMA-chinese-insturct
13,626
src/transformers/models/megatron_gpt2/convert_megatron_gpt2_checkpoint.py
####################################################################################################

# Copyright (c) 2021-, NVIDIA CORPORATION.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

####################################################################################################

#
# Note: If when running this conversion script you're getting an exception:
#     ModuleNotFoundError: No module named 'megatron.model.enums'
# you need to tell python where to find the clone of Megatron-LM, e.g.:
#
# cd /tmp
# git clone https://github.com/NVIDIA/Megatron-LM
# PYTHONPATH=/tmp/Megatron-LM python src/transformers/models/megatron_gpt2/convert_megatron_gpt2_checkpoint.py ...
#
# if you already have it cloned elsewhere, simply adjust the path to the existing path
#
# If the training was done using a Megatron-LM fork, e.g.,
# https://github.com/microsoft/Megatron-DeepSpeed/ then chances are that you need to have that one
# in your path, i.e., /path/to/Megatron-DeepSpeed/
#

import argparse
import os
import re
import zipfile

import torch

from transformers import AutoTokenizer, GPT2Config


####################################################################################################


def recursive_print(name, val, spaces=0):
    """Recursively pretty-print the structure of a (possibly nested) state dict.

    Args:
        name: Key of the current entry; ``None`` for the root call, in which case
            nothing is printed for the container itself.
        val: Value of the current entry (dict, tensor, or any other object).
        spaces: Indentation level; advanced by 2 for each nesting level.
    """
    # Format the message.
    if name is None:
        msg = None
    else:
        fmt = "." * max(0, spaces - 2) + "# {:" + str(50 - spaces) + "s}"
        msg = fmt.format(name)

    # Print and recurse (if needed).
    if isinstance(val, dict):
        if msg is not None:
            print(msg)
        for k in val.keys():
            recursive_print(k, val[k], spaces + 2)
    elif isinstance(val, torch.Tensor):
        # Tensors are summarized by their shape rather than dumped in full.
        print(msg, ":", val.size())
    else:
        print(msg, ":", val)


def fix_query_key_value_ordering(param, checkpoint_version, num_splits, num_heads, hidden_size):
    """Permute a fused QKV parameter into the layout expected by recent Megatron-LM.

    Permutes the layout of `param` to [num_splits * num_heads * hidden_size, :] for
    compatibility with later versions of NVIDIA Megatron-LM. The inverse operation is
    performed inside Megatron-LM to read checkpoints:
    https://github.com/NVIDIA/Megatron-LM/blob/v2.4/megatron/checkpointing.py#L209

    If `param` is the weight tensor of the self-attention block, the returned tensor
    will have to be transposed one more time to be read by HuggingFace GPT2.

    Args:
        param: Fused query/key/value weight or bias tensor.
        checkpoint_version: Megatron checkpoint format version (0.0, 1.0 or >= 2.0).
        num_splits: Number of fused projections (3 for Q, K, V).
        num_heads: Number of attention heads.
        hidden_size: Hidden size per attention head.

    Returns:
        The tensor with the same shape as the input but reordered storage.
    """
    input_shape = param.size()
    if checkpoint_version == 1.0:
        # version 1.0 stores [num_heads * hidden_size * num_splits, :]
        saved_shape = (num_heads, hidden_size, num_splits) + input_shape[1:]
        param = param.view(*saved_shape)
        param = param.transpose(0, 2)
        param = param.transpose(1, 2).contiguous()
    elif checkpoint_version >= 2.0:
        # other versions store [num_heads * num_splits * hidden_size, :]
        saved_shape = (num_heads, num_splits, hidden_size) + input_shape[1:]
        param = param.view(*saved_shape)
        param = param.transpose(0, 1).contiguous()
    # versions < 1.0 fall through unchanged apart from the final reshape
    param = param.view(*input_shape)
    return param


####################################################################################################


def convert_megatron_checkpoint(args, input_state_dict, config):
    """Convert a Megatron-LM GPT2 state dict into a HuggingFace GPT2 state dict.

    Args:
        args: Parsed command-line arguments (currently unused here; kept for symmetry).
        input_state_dict: The Megatron checkpoint loaded with ``torch.load``.
        config: A ``GPT2Config`` that is updated in place from the checkpoint's
            training args when those are present.

    Returns:
        A flat dict mapping HuggingFace GPT2 parameter names to tensors.
    """
    # The converted output model.
    output_state_dict = {}

    # old versions did not store training args
    ds_args = input_state_dict.get("args", None)
    if ds_args is not None:
        # do not make the user write a config file when the exact dimensions/sizes are already in the checkpoint
        # from pprint import pprint
        # pprint(vars(ds_args))

        config.vocab_size = ds_args.padded_vocab_size
        config.n_positions = ds_args.max_position_embeddings
        config.n_embd = ds_args.hidden_size
        config.n_layer = ds_args.num_layers
        config.n_head = ds_args.num_attention_heads
        config.n_inner = ds_args.ffn_hidden_size
        # pprint(config)

    # The number of heads.
    heads = config.n_head
    # The hidden_size per head.
    hidden_size_per_head = config.n_embd // config.n_head
    # Megatron-LM checkpoint version
    if "checkpoint_version" in input_state_dict.keys():
        checkpoint_version = input_state_dict["checkpoint_version"]
    else:
        checkpoint_version = 0.0

    # The model.
    model = input_state_dict["model"]
    # The language model.
    lm = model["language_model"]
    # The embeddings.
    embeddings = lm["embedding"]

    # The word embeddings.
    word_embeddings = embeddings["word_embeddings"]["weight"]
    # Truncate the embedding table to vocab_size rows.
    word_embeddings = word_embeddings[: config.vocab_size, :]
    output_state_dict["transformer.wte.weight"] = word_embeddings

    # The position embeddings.
    pos_embeddings = embeddings["position_embeddings"]["weight"]
    # Read the causal mask dimension (seqlen). [max_sequence_length, hidden_size]
    n_positions = pos_embeddings.size(0)
    if n_positions != config.n_positions:
        raise ValueError(
            f"pos_embeddings.max_sequence_length={n_positions} and config.n_positions={config.n_positions} don't match"
        )
    # Store the position embeddings.
    output_state_dict["transformer.wpe.weight"] = pos_embeddings

    # The transformer.
    transformer = lm["transformer"] if "transformer" in lm.keys() else lm["encoder"]

    # The regex to extract layer names.
    # NOTE: must be a raw string — "\." / "\d" in a plain string literal are invalid
    # escape sequences (SyntaxWarning on Python >= 3.12).
    layer_re = re.compile(r"layers\.(\d+)\.([a-z0-9_.]+)\.([a-z]+)")

    # The simple map of names for "automated" rules.
    megatron_to_transformers = {
        "attention.dense": ".attn.c_proj.",
        "self_attention.dense": ".attn.c_proj.",
        "mlp.dense_h_to_4h": ".mlp.c_fc.",
        "mlp.dense_4h_to_h": ".mlp.c_proj.",
    }

    # Extract the layers.
    for key, val in transformer.items():
        # Match the name.
        m = layer_re.match(key)

        # Stop if that's not a layer
        if m is None:
            break

        # The index of the layer.
        layer_idx = int(m.group(1))
        # The name of the operation.
        op_name = m.group(2)
        # Is it a weight or a bias?
        weight_or_bias = m.group(3)

        # The name of the layer.
        layer_name = f"transformer.h.{layer_idx}"

        # For layernorm(s), simply store the layer norm.
        if op_name.endswith("layernorm"):
            ln_name = "ln_1" if op_name.startswith("input") else "ln_2"
            output_state_dict[layer_name + "." + ln_name + "." + weight_or_bias] = val

        # Transpose the QKV matrix.
        elif (
            op_name == "attention.query_key_value" or op_name == "self_attention.query_key_value"
        ) and weight_or_bias == "weight":
            # Insert a tensor of 1x1xDxD bias.
            causal_mask = torch.tril(torch.ones((n_positions, n_positions), dtype=torch.float16)).view(
                1, 1, n_positions, n_positions
            )
            output_state_dict[layer_name + ".attn.bias"] = causal_mask

            # Insert a "dummy" tensor for masked_bias.
            masked_bias = torch.tensor(-1e4, dtype=torch.float16)
            output_state_dict[layer_name + ".attn.masked_bias"] = masked_bias

            out_val = fix_query_key_value_ordering(val, checkpoint_version, 3, heads, hidden_size_per_head)
            # Megatron stores (3*D) x D but transformers-GPT2 expects D x 3*D.
            out_val = out_val.transpose(0, 1).contiguous()
            # Store.
            output_state_dict[layer_name + ".attn.c_attn.weight"] = out_val

        # Transpose the bias.
        elif (
            op_name == "attention.query_key_value" or op_name == "self_attention.query_key_value"
        ) and weight_or_bias == "bias":
            out_val = fix_query_key_value_ordering(val, checkpoint_version, 3, heads, hidden_size_per_head)
            # Store. No change of shape.
            output_state_dict[layer_name + ".attn.c_attn.bias"] = out_val

        # Transpose the weights.
        elif weight_or_bias == "weight":
            out_name = megatron_to_transformers[op_name]
            output_state_dict[layer_name + out_name + "weight"] = val.transpose(0, 1)

        # Copy the bias.
        elif weight_or_bias == "bias":
            out_name = megatron_to_transformers[op_name]
            output_state_dict[layer_name + out_name + "bias"] = val

    # DEBUG.
    assert config.n_layer == layer_idx + 1

    # The final layernorm.
    output_state_dict["transformer.ln_f.weight"] = transformer["final_layernorm.weight"]
    output_state_dict["transformer.ln_f.bias"] = transformer["final_layernorm.bias"]

    # For LM head, transformers' wants the matrix to weight embeddings.
    output_state_dict["lm_head.weight"] = word_embeddings

    # It should be done!
    return output_state_dict


####################################################################################################


def main():
    """CLI entry point: load a Megatron GPT2 checkpoint, convert it, and save
    config, tokenizer and weights in HuggingFace format next to the input file."""
    # Create the argument parser.
    parser = argparse.ArgumentParser()
    parser.add_argument("--print-checkpoint-structure", action="store_true")
    parser.add_argument(
        "path_to_checkpoint",
        type=str,
        help="Path to the checkpoint file (.zip archive or direct .pt file)",
    )
    parser.add_argument(
        "--config_file",
        default="",
        type=str,
        help="An optional config json file describing the pre-trained model.",
    )
    args = parser.parse_args()

    # Extract the basename.
    basename = os.path.dirname(args.path_to_checkpoint)

    # Load the model.
    # the .zip is very optional, let's keep it for backward compatibility
    print(f"Extracting PyTorch state dictionary from {args.path_to_checkpoint}")
    if args.path_to_checkpoint.endswith(".zip"):
        with zipfile.ZipFile(args.path_to_checkpoint, "r") as checkpoint:
            with checkpoint.open("release/mp_rank_00/model_optim_rng.pt") as pytorch_dict:
                input_state_dict = torch.load(pytorch_dict, map_location="cpu")
    else:
        input_state_dict = torch.load(args.path_to_checkpoint, map_location="cpu")

    ds_args = input_state_dict.get("args", None)

    # Read the config, or default to the model released by NVIDIA.
    if args.config_file == "":
        if ds_args is not None:
            if ds_args.bias_gelu_fusion:
                activation_function = "gelu_fast"
            elif ds_args.openai_gelu:
                activation_function = "gelu_new"
            else:
                activation_function = "gelu"
        else:
            # in the very early days this used to be "gelu_new"
            activation_function = "gelu_new"

        # Spell out all parameters in case the defaults change.
        config = GPT2Config(
            vocab_size=50257,
            n_positions=1024,
            n_embd=1024,
            n_layer=24,
            n_head=16,
            n_inner=4096,
            activation_function=activation_function,
            resid_pdrop=0.1,
            embd_pdrop=0.1,
            attn_pdrop=0.1,
            layer_norm_epsilon=1e-5,
            initializer_range=0.02,
            summary_type="cls_index",
            summary_use_proj=True,
            summary_activation=None,
            summary_proj_to_labels=True,
            summary_first_dropout=0.1,
            scale_attn_weights=True,
            use_cache=True,
            bos_token_id=50256,
            eos_token_id=50256,
        )
    else:
        config = GPT2Config.from_json_file(args.config_file)

    config.architectures = ["GPT2LMHeadModel"]

    # Convert.
    print("Converting")
    output_state_dict = convert_megatron_checkpoint(args, input_state_dict, config)

    # Print the structure of converted state dict.
    if args.print_checkpoint_structure:
        recursive_print(None, output_state_dict)

    # Add tokenizer class info to config
    # see https://github.com/huggingface/transformers/issues/13906)
    if ds_args is not None:
        tokenizer_type = ds_args.tokenizer_type
        if tokenizer_type == "GPT2BPETokenizer":
            tokenizer_model_name = "gpt2"
        elif tokenizer_type == "PretrainedFromHF":
            tokenizer_model_name = ds_args.tokenizer_name_or_path
        else:
            raise ValueError(f"Unrecognized tokenizer_type {tokenizer_type}")
    else:
        tokenizer_model_name = "gpt2"

    tokenizer = AutoTokenizer.from_pretrained(tokenizer_model_name)
    tokenizer_class = type(tokenizer).__name__
    config.tokenizer_class = tokenizer_class

    # Store the config to file.
    print("Saving config")
    config.save_pretrained(basename)

    # Save tokenizer based on args
    print(f"Adding {tokenizer_class} tokenizer files")
    tokenizer.save_pretrained(basename)

    # Store the state_dict to file.
    output_checkpoint_file = os.path.join(basename, "pytorch_model.bin")
    print(f'Saving checkpoint to "{output_checkpoint_file}"')
    torch.save(output_state_dict, output_checkpoint_file)


####################################################################################################

if __name__ == "__main__":
    main()

####################################################################################################
27182812/ChatGLM-LLaMA-chinese-insturct
7,411
src/transformers/models/trajectory_transformer/configuration_trajectory_transformer.py
# coding=utf-8
# Copyright 2022 The Trajectory Transformers paper authors and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" TrajectoryTransformer model configuration"""

from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

TRAJECTORY_TRANSFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "CarlCochet/trajectory-transformer-halfcheetah-medium-v2": (
        "https://huggingface.co/CarlCochet/trajectory-transformer-halfcheetah-medium-v2/resolve/main/config.json"
    ),
    # See all TrajectoryTransformer models at https://huggingface.co/models?filter=trajectory_transformer
}


class TrajectoryTransformerConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`TrajectoryTransformerModel`]. It is used to
    instantiate a TrajectoryTransformer model according to the specified arguments, defining the model architecture.
    Instantiating a configuration with the defaults will yield a similar configuration to that of the
    TrajectoryTransformer
    [CarlCochet/trajectory-transformer-halfcheetah-medium-v2](https://huggingface.co/CarlCochet/trajectory-transformer-halfcheetah-medium-v2)
    architecture.

    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.

    Args:
        vocab_size (`int`, *optional*, defaults to 100):
            Vocabulary size of the TrajectoryTransformer model. Defines the number of different tokens that can be
            represented by the `trajectories` passed when calling [`TrajectoryTransformerModel`].
        action_weight (`int`, *optional*, defaults to 5):
            Weight of the action in the loss function.
        reward_weight (`int`, *optional*, defaults to 1):
            Weight of the reward in the loss function.
        value_weight (`int`, *optional*, defaults to 1):
            Weight of the value in the loss function.
        block_size (`int`, *optional*, defaults to 249):
            Size of the blocks in the trajectory transformer.
        action_dim (`int`, *optional*, defaults to 6):
            Dimension of the action space.
        observation_dim (`int`, *optional*, defaults to 17):
            Dimension of the observation space.
        transition_dim (`int`, *optional*, defaults to 25):
            Dimension of the transition space.
        n_layer (`int`, *optional*, defaults to 4):
            Number of hidden layers in the Transformer encoder.
        n_head (`int`, *optional*, defaults to 4):
            Number of attention heads for each attention layer in the Transformer encoder.
        n_embd (`int`, *optional*, defaults to 128):
            Dimensionality of the embeddings and hidden states.
        embd_pdrop (`int`, *optional*, defaults to 0.1):
            The dropout ratio for the embeddings.
        attn_pdrop (`float`, *optional*, defaults to 0.1):
            The dropout ratio for the attention.
        resid_pdrop (`float`, *optional*, defaults to 0.1):
            The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
        learning_rate (`float`, *optional*, defaults to 0.0006):
            Learning rate stored alongside the architecture hyper-parameters.
        max_position_embeddings (`int`, *optional*, defaults to 512):
            The maximum sequence length that this model might ever be used with. Typically set this to something large
            just in case (e.g., 512 or 1024 or 2048).
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        layer_norm_eps (`float`, *optional*, defaults to 1e-12):
            The epsilon used by the layer normalization layers.
        kaiming_initializer_range (`float`, *optional*, defaults to 1):
            A coefficient scaling the negative slope of the kaiming initializer rectifier for EinLinear layers.
        use_cache (`bool`, *optional*, defaults to `True`):
            Whether or not the model should return the last key/values attentions (not used by all models). Only
            relevant if `config.is_decoder=True`.

    Example:

    ```python
    >>> from transformers import TrajectoryTransformerConfig, TrajectoryTransformerModel

    >>> # Initializing a TrajectoryTransformer CarlCochet/trajectory-transformer-halfcheetah-medium-v2 style configuration
    >>> configuration = TrajectoryTransformerConfig()

    >>> # Initializing a model (with random weights) from the CarlCochet/trajectory-transformer-halfcheetah-medium-v2 style configuration
    >>> model = TrajectoryTransformerModel(configuration)

    >>> # Accessing the model configuration
    >>> configuration = model.config
    ```"""

    model_type = "trajectory_transformer"
    keys_to_ignore_at_inference = ["past_key_values"]
    attribute_map = {
        "hidden_size": "n_embd",
        "num_attention_heads": "n_head",
        "num_hidden_layers": "n_layer",
    }

    def __init__(
        self,
        vocab_size=100,
        action_weight=5,
        reward_weight=1,
        value_weight=1,
        block_size=249,
        action_dim=6,
        observation_dim=17,
        transition_dim=25,
        n_layer=4,
        n_head=4,
        n_embd=128,
        embd_pdrop=0.1,
        attn_pdrop=0.1,
        resid_pdrop=0.1,
        learning_rate=0.0006,
        max_position_embeddings=512,
        initializer_range=0.02,
        layer_norm_eps=1e-12,
        kaiming_initializer_range=1,
        use_cache=True,
        pad_token_id=1,
        bos_token_id=50256,
        eos_token_id=50256,
        **kwargs,
    ):
        # Vocabulary / loss weighting.
        self.vocab_size = vocab_size
        self.action_weight = action_weight
        self.reward_weight = reward_weight
        self.value_weight = value_weight

        # Trajectory geometry.
        self.block_size = block_size
        self.action_dim = action_dim
        self.observation_dim = observation_dim
        self.transition_dim = transition_dim

        # Transformer architecture.
        self.n_layer = n_layer
        self.n_head = n_head
        self.n_embd = n_embd
        self.max_position_embeddings = max_position_embeddings

        # Regularization.
        self.embd_pdrop = embd_pdrop
        self.attn_pdrop = attn_pdrop
        self.resid_pdrop = resid_pdrop

        # Training / initialization.
        self.learning_rate = learning_rate
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
        self.kaiming_initializer_range = kaiming_initializer_range
        self.use_cache = use_cache

        super().__init__(pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id, **kwargs)
27182812/ChatGLM-LLaMA-chinese-insturct
3,139
src/transformers/models/trajectory_transformer/convert_trajectory_transformer_original_pytorch_checkpoint_to_pytorch.py
# coding=utf-8
# Copyright 2022 The Trajectory Transformers paper authors and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" TrajectoryTransformer pytorch checkpoint conversion"""

import torch
import trajectory.utils as utils

from transformers import TrajectoryTransformerModel


class Parser(utils.Parser):
    # Defaults used when parsing the original repository's plan config.
    dataset: str = "halfcheetah-medium-expert-v2"
    config: str = "config.offline"


def convert_trajectory_transformer_original_pytorch_checkpoint_to_pytorch(logbase, dataset, loadpath, epoch, device):
    """Converting Sequential blocks to ModuleList"""

    # Load the original GPT-style model from the trajectory-transformer repository.
    gpt, gpt_epoch = utils.load_model(logbase, dataset, loadpath, epoch=epoch, device=device)
    converted = TrajectoryTransformerModel(gpt.config)

    # Top-level modules map one-to-one; the positional embedding is a plain
    # parameter, so it is assigned directly rather than via a state dict.
    converted.tok_emb.load_state_dict(gpt.tok_emb.state_dict())
    converted.pos_emb = gpt.pos_emb
    converted.drop.load_state_dict(gpt.drop.state_dict())
    converted.ln_f.load_state_dict(gpt.ln_f.state_dict())
    converted.head.load_state_dict(gpt.head.state_dict())

    for idx, _ in enumerate(gpt.blocks):
        src = gpt.blocks[idx]
        dst = converted.blocks[idx]

        # Layer norms and attention copy over directly.
        dst.ln1.load_state_dict(src.ln1.state_dict())
        dst.ln2.load_state_dict(src.ln2.state_dict())
        dst.attn.load_state_dict(src.attn.state_dict())

        # The original MLP is an nn.Sequential; map its positional entries
        # (linear, activation, linear, dropout) onto the named sub-modules.
        dst.l1.load_state_dict(src.mlp[0].state_dict())
        dst.act.load_state_dict(src.mlp[1].state_dict())
        dst.l2.load_state_dict(src.mlp[2].state_dict())
        dst.drop.load_state_dict(src.mlp[3].state_dict())

    torch.save(converted.state_dict(), "pytorch_model.bin")


if __name__ == "__main__":
    """
    To run this script you will need to install the original repository to run the original model. You can find it
    here: https://github.com/jannerm/trajectory-transformer From this repository code you can also download the
    original pytorch checkpoints.

    Run with the command:

    ```sh
    >>> python convert_trajectory_transformer_original_pytorch_checkpoint_to_pytorch.py --dataset <dataset_name>
    ...     --gpt_loadpath <path_to_original_pytorch_checkpoint>
    ```
    """

    args = Parser().parse_args("plan")
    convert_trajectory_transformer_original_pytorch_checkpoint_to_pytorch(
        args.logbase, args.dataset, args.gpt_loadpath, args.gpt_epoch, args.device
    )
27182812/ChatGLM-LLaMA-chinese-insturct
2,076
src/transformers/models/trajectory_transformer/__init__.py
# Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available


# Import structure consumed by _LazyModule: maps submodule name -> public names.
# The configuration is always importable; the modeling objects are registered
# only when torch is present.
_import_structure = {
    "configuration_trajectory_transformer": [
        "TRAJECTORY_TRANSFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "TrajectoryTransformerConfig",
    ],
}

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    # torch missing: expose only the configuration symbols.
    pass
else:
    _import_structure["modeling_trajectory_transformer"] = [
        "TRAJECTORY_TRANSFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TrajectoryTransformerModel",
        "TrajectoryTransformerPreTrainedModel",
        "load_tf_weights_in_trajectory_transformer",
    ]


if TYPE_CHECKING:
    # Static type checkers see the real imports; at runtime the module is
    # replaced by a _LazyModule below.
    from .configuration_trajectory_transformer import (
        TRAJECTORY_TRANSFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,
        TrajectoryTransformerConfig,
    )

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_trajectory_transformer import (
            TRAJECTORY_TRANSFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            TrajectoryTransformerModel,
            TrajectoryTransformerPreTrainedModel,
            load_tf_weights_in_trajectory_transformer,
        )

else:
    import sys

    # Defer all submodule imports until an attribute is actually accessed.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
26,132
src/transformers/models/trajectory_transformer/modeling_trajectory_transformer.py
# coding=utf-8 # Copyright 2022 The Trajectory Transformers paper authors and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch TrajectoryTransformer model.""" import math import os from dataclasses import dataclass from typing import Optional, Tuple, Union import numpy as np import torch import torch.utils.checkpoint from torch import nn from torch.nn import functional as F from ...modeling_utils import PreTrainedModel from ...utils import ( ModelOutput, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_trajectory_transformer import TrajectoryTransformerConfig logger = logging.get_logger(__name__) _CHECKPOINT_FOR_DOC = "CarlCochet/trajectory-transformer-halfcheetah-medium-v2" _CONFIG_FOR_DOC = "TrajectoryTransformerConfig" TRAJECTORY_TRANSFORMER_PRETRAINED_MODEL_ARCHIVE_LIST = [ "CarlCochet/trajectory-transformer-halfcheetah-medium-v2", # See all TrajectoryTransformer models at https://huggingface.co/models?filter=trajectory_transformer ] def load_tf_weights_in_trajectory_transformer(model, config, tf_checkpoint_path): """Load tf checkpoints in a pytorch model.""" try: import re import numpy as np import tensorflow as tf except ImportError: logger.error( "Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see " "https://www.tensorflow.org/install/ for installation instructions." 
) raise tf_path = os.path.abspath(tf_checkpoint_path) logger.info(f"Converting TensorFlow checkpoint from {tf_path}") # Load weights from TF model init_vars = tf.train.list_variables(tf_path) names = [] arrays = [] for name, shape in init_vars: logger.info(f"Loading TF weight {name} with shape {shape}") array = tf.train.load_variable(tf_path, name) names.append(name) arrays.append(array) for name, array in zip(names, arrays): name = name.split("/") # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v # which are not required for using pretrained model if any( n in ["adam_v", "adam_m", "AdamWeightDecayOptimizer", "AdamWeightDecayOptimizer_1", "global_step"] for n in name ): logger.info(f"Skipping {'/'.join(name)}") continue pointer = model for m_name in name: if re.fullmatch(r"[A-Za-z]+_\d+", m_name): scope_names = re.split(r"_(\d+)", m_name) else: scope_names = [m_name] if scope_names[0] == "kernel" or scope_names[0] == "gamma": pointer = getattr(pointer, "weight") elif scope_names[0] == "output_bias" or scope_names[0] == "beta": pointer = getattr(pointer, "bias") elif scope_names[0] == "output_weights": pointer = getattr(pointer, "weight") elif scope_names[0] == "squad": pointer = getattr(pointer, "classifier") else: try: pointer = getattr(pointer, scope_names[0]) except AttributeError: logger.info(f"Skipping {'/'.join(name)}") continue if len(scope_names) >= 2: num = int(scope_names[1]) pointer = pointer[num] if m_name[-11:] == "_embeddings": pointer = getattr(pointer, "weight") elif m_name == "kernel": array = np.transpose(array) try: if pointer.shape != array.shape: raise ValueError(f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched") except AssertionError as e: e.args += (pointer.shape, array.shape) raise logger.info(f"Initialize PyTorch weight {name}") pointer.data = torch.from_numpy(array) return model @dataclass class TrajectoryTransformerOutput(ModelOutput): """ Base class for model's outputs that 
also contains a pooling of the last hidden states. Args: loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` is provided): Language modeling loss. logits (`torch.FloatTensor` of shape `(batch_size, sequence_length, config.vocab_size)`): Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax). past_key_values (`Tuple[Tuple[torch.Tensor]]`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): Tuple of length `config.n_layers`, containing tuples of tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`). Contains pre-computed hidden-states (key and values in the attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. GPT2Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. 
""" loss: Optional[torch.FloatTensor] = None logits: torch.FloatTensor = None past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None hidden_states: Optional[Tuple[torch.FloatTensor]] = None attentions: Optional[Tuple[torch.FloatTensor]] = None class TrajectoryTransformerPreTrainedModel(PreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. """ config_class = TrajectoryTransformerConfig load_tf_weights = load_tf_weights_in_trajectory_transformer base_model_prefix = "trajectory_transformer" main_input_name = "trajectories" supports_gradient_checkpointing = True def _set_gradient_checkpointing(self, module, value=False): if isinstance(module, TrajectoryTransformerModel): module.gradient_checkpointing = value def _init_weights(self, module): if isinstance(module, (nn.Linear, nn.Embedding)): module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if isinstance(module, nn.Linear) and module.bias is not None: module.bias.data.zero_() elif isinstance(module, nn.LayerNorm): module.bias.data.zero_() module.weight.data.fill_(1.0) elif isinstance(module, EinLinear): for i in range(module.n_models): nn.init.kaiming_uniform_(module.weight[i], a=math.sqrt(5) / self.config.kaiming_initializer_range) if module.bias is not None: fan_in, _ = nn.init._calculate_fan_in_and_fan_out(module.weight[i]) bound = (1 / math.sqrt(fan_in)) * self.config.initializer_range nn.init.uniform_(module.bias[i], -bound, bound) TRAJECTORY_TRANSFORMER_START_DOCSTRING = r""" This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) sub-class. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`TrajectoryTransformerConfig`]): Model configuration class with all the parameters of the model. 
Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ TRAJECTORY_TRANSFORMER_INPUTS_DOCSTRING = r""" Args: trajectories (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Batch of trajectories, where a trajectory is a sequence of states, actions and rewards. past_key_values (`Tuple[Tuple[torch.Tensor]]` of length `config.n_layers`, *optional*): Contains precomputed hidden-states (key and values in the attention blocks) as computed by the model (see `past_key_values` output below). Can be used to speed up sequential decoding. The `input_ids` which have their past given to this model should not be passed as `input_ids` as they have already been computed. targets (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Desired targets used to compute the loss. attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) use_cache (`bool`, *optional*): If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see `past_key_values`). output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. 
""" class EinLinear(nn.Module): def __init__(self, n_models, in_features, out_features, bias): super().__init__() self.n_models = n_models self.out_features = out_features self.in_features = in_features self.weight = nn.Parameter(torch.Tensor(n_models, out_features, in_features)) if bias: self.bias = nn.Parameter(torch.Tensor(n_models, out_features)) else: self.register_parameter("bias", None) def reset_parameters(self): for i in range(self.n_models): nn.init.kaiming_uniform_(self.weight[i], a=math.sqrt(5)) if self.bias is not None: fan_in, _ = nn.init._calculate_fan_in_and_fan_out(self.weight[i]) bound = 1 / math.sqrt(fan_in) nn.init.uniform_(self.bias[i], -bound, bound) def forward(self, input): """ Args: input (`torch.FloatTensor` of shape `(B, n_models, input_dim)`): The input to the layer. """ # [ batch_size x n_models x output_dim ] output = torch.einsum("eoi,bei->beo", self.weight, input) if self.bias is not None: raise RuntimeError() return output class CausalSelfAttention(nn.Module): def __init__(self, config): super().__init__() if config.n_embd % config.n_head != 0: raise ValueError(f"n_head ({config.n_head}) should be a divisor of n_embd ({config.n_embd})") # key, query, value projections for all heads self.key = nn.Linear(config.n_embd, config.n_embd) self.query = nn.Linear(config.n_embd, config.n_embd) self.value = nn.Linear(config.n_embd, config.n_embd) # regularization self.attn_drop = nn.Dropout(config.attn_pdrop) self.resid_drop = nn.Dropout(config.resid_pdrop) # output projection self.proj = nn.Linear(config.n_embd, config.n_embd) # causal mask to ensure that attention is only applied to the left in the input sequence self.register_buffer( "mask", torch.tril(torch.ones(config.block_size, config.block_size)).view( 1, 1, config.block_size, config.block_size ), ) # mask previous value estimates joined_dim = config.observation_dim + config.action_dim + 2 self.mask.squeeze()[:, joined_dim - 1 :: joined_dim] = 0 self.n_head = config.n_head def forward( 
self, hidden_states: Optional[Tuple[torch.FloatTensor]], layer_past: Optional[Tuple[torch.Tensor]] = None, use_cache: Optional[bool] = False, output_attentions: Optional[bool] = False, ): batch_size, sequence_length, embedding_dim = hidden_states.size() # calculate query, key, values for all heads in batch and move head forward to be the batch dim # [ batch_size x n_heads x sequence_length x head_dim ] key = ( self.key(hidden_states) .view(batch_size, sequence_length, self.n_head, embedding_dim // self.n_head) .transpose(1, 2) ) query = ( self.query(hidden_states) .view(batch_size, sequence_length, self.n_head, embedding_dim // self.n_head) .transpose(1, 2) ) value = ( self.value(hidden_states) .view(batch_size, sequence_length, self.n_head, embedding_dim // self.n_head) .transpose(1, 2) ) if layer_past is not None: past_key, past_value = layer_past key = torch.cat((past_key, key), dim=-2) value = torch.cat((past_value, value), dim=-2) if use_cache is True: present = (key, value) else: present = None # causal self-attention # [ batch_size x n_heads x sequence_length x sequence_length ] attn_weights = (torch.matmul(query, key.transpose(-2, -1))) * (1.0 / math.sqrt(key.size(-1))) attn_weights = attn_weights.masked_fill( self.mask[:, :, :sequence_length, :sequence_length] == 0, torch.finfo(attn_weights.dtype).min ) attn_weights = F.softmax(attn_weights, dim=-1) self._attn_map = attn_weights.clone() attn_weights = self.attn_drop(attn_weights) output = torch.matmul(attn_weights, value) # [ batch_size x sequence_length x embedding_dim ] # re-assemble all head outputs side by side output = output.transpose(1, 2).contiguous().view(batch_size, sequence_length, embedding_dim) # output projection output = self.resid_drop(self.proj(output)) outputs = (output, present) if output_attentions: outputs += (attn_weights,) return outputs class Block(nn.Module): def __init__(self, config): super().__init__() self.ln1 = nn.LayerNorm(config.n_embd) self.ln2 = nn.LayerNorm(config.n_embd) 
self.attn = CausalSelfAttention(config) # MLP self.l1 = nn.Linear(config.n_embd, 4 * config.n_embd) self.act = nn.GELU() self.l2 = nn.Linear(4 * config.n_embd, config.n_embd) self.drop = nn.Dropout(config.resid_pdrop) def forward( self, hidden_states: Optional[Tuple[torch.FloatTensor]], layer_past: Optional[Tuple[torch.Tensor]] = None, use_cache: Optional[bool] = False, output_attentions: Optional[bool] = False, ): residual = hidden_states hidden_states = self.ln1(hidden_states) attn_outputs = self.attn( hidden_states, layer_past=layer_past, use_cache=use_cache, output_attentions=output_attentions ) attn_output = attn_outputs[0] outputs = attn_outputs[1:] hidden_states = attn_output + residual residual = hidden_states hidden_states = self.ln2(hidden_states) hidden_states = self.l1(hidden_states) hidden_states = self.act(hidden_states) hidden_states = self.l2(hidden_states) hidden_states = residual + self.drop(hidden_states) if use_cache: outputs = (hidden_states,) + outputs else: outputs = (hidden_states,) + outputs[1:] return outputs @add_start_docstrings( "The bare TrajectoryTransformer Model transformer outputting raw hidden-states without any specific head on top.", TRAJECTORY_TRANSFORMER_START_DOCSTRING, ) class TrajectoryTransformerModel(TrajectoryTransformerPreTrainedModel): """the full GPT language model, with a context size of block_size""" def __init__(self, config): super().__init__(config) # input embedding stem (+1 for stop token) self.tok_emb = nn.Embedding(config.vocab_size * config.transition_dim + 1, config.n_embd) self.pos_emb = nn.Parameter(torch.zeros(1, config.block_size, config.n_embd)) self.drop = nn.Dropout(config.embd_pdrop) # transformer self.blocks = nn.ModuleList([Block(config) for _ in range(config.n_layer)]) # decoder head self.ln_f = nn.LayerNorm(config.n_embd) self.head = EinLinear(config.transition_dim, config.n_embd, config.vocab_size + 1, bias=False) self.vocab_size = config.vocab_size self.stop_token = config.vocab_size * 
config.transition_dim self.block_size = config.block_size self.observation_dim = config.observation_dim self.action_dim = config.action_dim self.transition_dim = config.transition_dim self.embedding_dim = config.n_embd self.action_weight = config.action_weight self.reward_weight = config.reward_weight self.value_weight = config.value_weight self.gradient_checkpointing = False self.post_init() def get_block_size(self): return self.block_size def offset_tokens(self, trajectories): _, sequence_length = trajectories.shape n_states = int(np.ceil(sequence_length / self.transition_dim)) offsets = torch.arange(self.transition_dim) * self.vocab_size offsets = offsets.repeat(n_states).to(trajectories.device) offset_trajectories = trajectories + offsets[:sequence_length] offset_trajectories[trajectories == self.vocab_size] = self.stop_token return offset_trajectories def pad_to_full_observation(self, hidden_states): batch_size, sequence_length, _ = hidden_states.shape n_pad = (self.transition_dim - sequence_length % self.transition_dim) % self.transition_dim padding = torch.zeros(batch_size, n_pad, self.embedding_dim, device=hidden_states.device) # [ batch_size x padded_sequence_length' x embedding_dim ] hidden_states_pad = torch.cat([hidden_states, padding], dim=1) hidden_states_pad = hidden_states_pad.view(-1, self.transition_dim, self.embedding_dim) return hidden_states_pad, n_pad @add_start_docstrings_to_model_forward( TRAJECTORY_TRANSFORMER_INPUTS_DOCSTRING.format("batch_size, sequence_length") ) @replace_return_docstrings(output_type=TrajectoryTransformerOutput, config_class=_CONFIG_FOR_DOC) def forward( self, trajectories: Optional[torch.LongTensor] = None, past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None, targets: Optional[torch.FloatTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) 
-> Union[Tuple[torch.Tensor], TrajectoryTransformerOutput]: r""" Returns: Examples: ```python >>> from transformers import TrajectoryTransformerModel >>> import torch >>> model = TrajectoryTransformerModel.from_pretrained( ... "CarlCochet/trajectory-transformer-halfcheetah-medium-v2" ... ) >>> model.to(device) >>> model.eval() >>> observations_dim, action_dim, batch_size = 17, 6, 256 >>> seq_length = observations_dim + action_dim + 1 >>> trajectories = torch.LongTensor([np.random.permutation(self.seq_length) for _ in range(batch_size)]).to( ... device ... ) >>> targets = torch.LongTensor([np.random.permutation(self.seq_length) for _ in range(batch_size)]).to(device) >>> outputs = model( ... trajectories, ... targets=targets, ... use_cache=True, ... output_attentions=True, ... output_hidden_states=True, ... return_dict=True, ... ) ``` """ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) if past_key_values is None: past_key_values = tuple([None] * len(self.blocks)) batch_size, sequence_length = trajectories.size() if sequence_length > self.block_size: raise ValueError("Cannot forward, model block size is exhausted.") offset_trajectories = self.offset_tokens(trajectories) # [ batch_size x sequence_length x embedding_dim ] # forward the GPT model token_embeddings = self.tok_emb(offset_trajectories) # each index maps to a (learnable) vector position_embeddings = self.pos_emb[:, :sequence_length, :] # each position maps to a (learnable) vector hidden_states = self.drop(token_embeddings + position_embeddings) presents = () if use_cache else None all_self_attentions = () if output_attentions else None all_hidden_states = () if output_hidden_states else None for i, (block, layer_past) in enumerate(zip(self.blocks, past_key_values)): if output_hidden_states: all_hidden_states = all_hidden_states + 
(hidden_states,) if self.gradient_checkpointing and self.training: if use_cache: logger.warning_once( "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." ) use_cache = False def create_custom_forward(module): def custom_forward(*inputs): return module(*inputs) return custom_forward outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(block), hidden_states, layer_past, use_cache, output_attentions, ) else: outputs = block(hidden_states, layer_past, use_cache, output_attentions) hidden_states = outputs[0] if use_cache is True: presents = presents + (outputs[1],) if output_attentions: all_self_attentions = all_self_attentions + (outputs[2 if use_cache else 1],) # [ batch_size x sequence_length x embedding_dim ] hidden_state = self.ln_f(hidden_states) if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) hidden_states_pad, n_pad = self.pad_to_full_observation(hidden_state) logits = self.head(hidden_states_pad) logits = logits.reshape(batch_size, sequence_length + n_pad, self.vocab_size + 1) logits = logits[:, :sequence_length] # if we are given some desired targets also calculate the loss if targets is not None: loss = F.cross_entropy(logits.reshape(-1, logits.size(-1)), targets.view(-1), reduction="none") if self.action_weight != 1 or self.reward_weight != 1 or self.value_weight != 1: # make weights n_states = int(np.ceil(sequence_length / self.transition_dim)) weights = torch.cat( [ torch.ones(self.observation_dim, device=trajectories.device), torch.ones(self.action_dim, device=trajectories.device) * self.action_weight, torch.ones(1, device=trajectories.device) * self.reward_weight, torch.ones(1, device=trajectories.device) * self.value_weight, ] ) weights = weights.repeat(n_states) weights = weights[1:].repeat(batch_size, 1) loss = loss * weights.view(-1) loss = (loss * attention_mask.view(-1)).mean() else: loss = None if not return_dict: return tuple(v for v in [loss, logits, 
presents, all_hidden_states, all_self_attentions] if v is not None) return TrajectoryTransformerOutput( loss=loss, logits=logits, past_key_values=presents, hidden_states=all_hidden_states, attentions=all_self_attentions, )
27182812/ChatGLM-LLaMA-chinese-insturct
3,294
src/transformers/models/mt5/__init__.py
# Copyright 2020 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available, ) if is_sentencepiece_available(): from ..t5.tokenization_t5 import T5Tokenizer else: from ...utils.dummy_sentencepiece_objects import T5Tokenizer MT5Tokenizer = T5Tokenizer if is_tokenizers_available(): from ..t5.tokenization_t5_fast import T5TokenizerFast else: from ...utils.dummy_tokenizers_objects import T5TokenizerFast MT5TokenizerFast = T5TokenizerFast _import_structure = {"configuration_mt5": ["MT5Config", "MT5OnnxConfig"]} try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_mt5"] = [ "MT5EncoderModel", "MT5ForConditionalGeneration", "MT5Model", "MT5PreTrainedModel", "MT5Stack", ] try: if not is_tf_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_tf_mt5"] = ["TFMT5EncoderModel", "TFMT5ForConditionalGeneration", "TFMT5Model"] try: if not is_flax_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_flax_mt5"] = ["FlaxMT5EncoderModel", "FlaxMT5ForConditionalGeneration", "FlaxMT5Model"] if TYPE_CHECKING: from 
.configuration_mt5 import MT5Config, MT5OnnxConfig try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_mt5 import MT5EncoderModel, MT5ForConditionalGeneration, MT5Model, MT5PreTrainedModel, MT5Stack try: if not is_tf_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_tf_mt5 import TFMT5EncoderModel, TFMT5ForConditionalGeneration, TFMT5Model try: if not is_flax_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_flax_mt5 import FlaxMT5EncoderModel, FlaxMT5ForConditionalGeneration, FlaxMT5Model else: import sys sys.modules[__name__] = _LazyModule( __name__, globals()["__file__"], _import_structure, extra_objects={"MT5Tokenizer": MT5Tokenizer, "MT5TokenizerFast": MT5TokenizerFast}, module_spec=__spec__, )
27182812/ChatGLM-LLaMA-chinese-insturct
91,557
src/transformers/models/mt5/modeling_mt5.py
# coding=utf-8 # Copyright 2020 Mesh TensorFlow authors, T5 Authors and HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch mT5 model.""" import copy import math import os import warnings from typing import Optional, Tuple, Union import torch from torch import nn from torch.nn import CrossEntropyLoss from torch.utils.checkpoint import checkpoint from ...activations import ACT2FN from ...modeling_outputs import ( BaseModelOutput, BaseModelOutputWithPastAndCrossAttentions, Seq2SeqLMOutput, Seq2SeqModelOutput, ) from ...modeling_utils import PreTrainedModel from ...pytorch_utils import find_pruneable_heads_and_indices, prune_linear_layer from ...utils import ( DUMMY_INPUTS, DUMMY_MASK, add_start_docstrings, add_start_docstrings_to_model_forward, is_torch_fx_proxy, logging, replace_return_docstrings, ) from ...utils.model_parallel_utils import assert_device_map, get_device_map from .configuration_mt5 import MT5Config logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = "MT5Config" _CHECKPOINT_FOR_DOC = "mt5-small" PARALLELIZE_DOCSTRING = r""" This is an experimental feature and is a subject to change at a moment's notice. Uses a device map to distribute attention modules of the model across several devices. If no device map is given, it will evenly distribute blocks across all devices. Args: device_map (`Dict[int, list]`, optional, defaults to None): A dictionary that maps attention modules to devices. 
Note that the embedding module and LMHead are always automatically mapped to the first device (for esoteric reasons). That means that the first device should have fewer attention modules mapped to it than other devices. For reference, the mt5 models have the following number of attention modules: - mt5-small: 6 - mt5-base: 12 - mt5-large: 24 - mt5-xl: 24 - mt5-xxl: 24 Example: ```python # Here is an example of a device map on a machine with 4 GPUs using mt5-xl, which has a total of 24 attention modules: model = MT5ForConditionalGeneration.from_pretrained("mt5-xl") device_map = { 0: [0, 1, 2], 1: [3, 4, 5, 6, 7, 8, 9], 2: [10, 11, 12, 13, 14, 15, 16], 3: [17, 18, 19, 20, 21, 22, 23], } model.parallelize(device_map) ``` """ DEPARALLELIZE_DOCSTRING = r""" Moves the model to cpu from a model parallel state. Example: ```python # On a 4 GPU machine with mt5-xl: model = MT5ForConditionalGeneration.from_pretrained("Mt5-xl") device_map = { 0: [0, 1, 2], 1: [3, 4, 5, 6, 7, 8, 9], 2: [10, 11, 12, 13, 14, 15, 16], 3: [17, 18, 19, 20, 21, 22, 23], } model.parallelize(device_map) # Splits the model across several devices model.deparallelize() # Put the model back on cpu and cleans memory by calling torch.cuda.empty_cache() ``` """ # Copied from transformers.models.t5.modeling_t5.T5LayerNorm with T5->MT5 class MT5LayerNorm(nn.Module): def __init__(self, hidden_size, eps=1e-6): """ Construct a layernorm module in the MT5 style. No bias and no subtraction of mean. """ super().__init__() self.weight = nn.Parameter(torch.ones(hidden_size)) self.variance_epsilon = eps def forward(self, hidden_states): # MT5 uses a layer_norm which only scales and doesn't shift, which is also known as Root Mean # Square Layer Normalization https://arxiv.org/abs/1910.07467 thus varience is calculated # w/o mean and there is no bias. 
Additionally we want to make sure that the accumulation for # half-precision inputs is done in fp32 variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon) # convert into half-precision if necessary if self.weight.dtype in [torch.float16, torch.bfloat16]: hidden_states = hidden_states.to(self.weight.dtype) return self.weight * hidden_states # Copied from transformers.models.t5.modeling_t5.T5DenseActDense with T5->MT5 class MT5DenseActDense(nn.Module): def __init__(self, config: MT5Config): super().__init__() self.wi = nn.Linear(config.d_model, config.d_ff, bias=False) self.wo = nn.Linear(config.d_ff, config.d_model, bias=False) self.dropout = nn.Dropout(config.dropout_rate) self.act = ACT2FN[config.dense_act_fn] def forward(self, hidden_states): hidden_states = self.wi(hidden_states) hidden_states = self.act(hidden_states) hidden_states = self.dropout(hidden_states) if ( isinstance(self.wo.weight, torch.Tensor) and hidden_states.dtype != self.wo.weight.dtype and self.wo.weight.dtype != torch.int8 ): hidden_states = hidden_states.to(self.wo.weight.dtype) hidden_states = self.wo(hidden_states) return hidden_states # Copied from transformers.models.t5.modeling_t5.T5DenseGatedActDense with T5->MT5 class MT5DenseGatedActDense(nn.Module): def __init__(self, config: MT5Config): super().__init__() self.wi_0 = nn.Linear(config.d_model, config.d_ff, bias=False) self.wi_1 = nn.Linear(config.d_model, config.d_ff, bias=False) self.wo = nn.Linear(config.d_ff, config.d_model, bias=False) self.dropout = nn.Dropout(config.dropout_rate) self.act = ACT2FN[config.dense_act_fn] def forward(self, hidden_states): hidden_gelu = self.act(self.wi_0(hidden_states)) hidden_linear = self.wi_1(hidden_states) hidden_states = hidden_gelu * hidden_linear hidden_states = self.dropout(hidden_states) # To make 8bit quantization work for google/flan-t5-xxl, self.wo is kept in float32. 
# See https://github.com/huggingface/transformers/issues/20287 # we also make sure the weights are not in `int8` in case users will force `_keep_in_fp32_modules` to be `None`` if ( isinstance(self.wo.weight, torch.Tensor) and hidden_states.dtype != self.wo.weight.dtype and self.wo.weight.dtype != torch.int8 ): hidden_states = hidden_states.to(self.wo.weight.dtype) hidden_states = self.wo(hidden_states) return hidden_states # Copied from transformers.models.t5.modeling_t5.T5LayerFF with T5->MT5 class MT5LayerFF(nn.Module): def __init__(self, config: MT5Config): super().__init__() if config.is_gated_act: self.DenseReluDense = MT5DenseGatedActDense(config) else: self.DenseReluDense = MT5DenseActDense(config) self.layer_norm = MT5LayerNorm(config.d_model, eps=config.layer_norm_epsilon) self.dropout = nn.Dropout(config.dropout_rate) def forward(self, hidden_states): forwarded_states = self.layer_norm(hidden_states) forwarded_states = self.DenseReluDense(forwarded_states) hidden_states = hidden_states + self.dropout(forwarded_states) return hidden_states # Copied from transformers.models.t5.modeling_t5.T5Attention with T5->MT5 class MT5Attention(nn.Module): def __init__(self, config: MT5Config, has_relative_attention_bias=False): super().__init__() self.is_decoder = config.is_decoder self.has_relative_attention_bias = has_relative_attention_bias self.relative_attention_num_buckets = config.relative_attention_num_buckets self.relative_attention_max_distance = config.relative_attention_max_distance self.d_model = config.d_model self.key_value_proj_dim = config.d_kv self.n_heads = config.num_heads self.dropout = config.dropout_rate self.inner_dim = self.n_heads * self.key_value_proj_dim # Mesh TensorFlow initialization to avoid scaling before softmax self.q = nn.Linear(self.d_model, self.inner_dim, bias=False) self.k = nn.Linear(self.d_model, self.inner_dim, bias=False) self.v = nn.Linear(self.d_model, self.inner_dim, bias=False) self.o = nn.Linear(self.inner_dim, 
self.d_model, bias=False) if self.has_relative_attention_bias: self.relative_attention_bias = nn.Embedding(self.relative_attention_num_buckets, self.n_heads) self.pruned_heads = set() self.gradient_checkpointing = False def prune_heads(self, heads): if len(heads) == 0: return heads, index = find_pruneable_heads_and_indices( heads, self.n_heads, self.key_value_proj_dim, self.pruned_heads ) # Prune linear layers self.q = prune_linear_layer(self.q, index) self.k = prune_linear_layer(self.k, index) self.v = prune_linear_layer(self.v, index) self.o = prune_linear_layer(self.o, index, dim=1) # Update hyper params self.n_heads = self.n_heads - len(heads) self.inner_dim = self.key_value_proj_dim * self.n_heads self.pruned_heads = self.pruned_heads.union(heads) @staticmethod def _relative_position_bucket(relative_position, bidirectional=True, num_buckets=32, max_distance=128): """ Adapted from Mesh Tensorflow: https://github.com/tensorflow/mesh/blob/0cb87fe07da627bf0b7e60475d59f95ed6b5be3d/mesh_tensorflow/transformer/transformer_layers.py#L593 Translate relative position to a bucket number for relative attention. The relative position is defined as memory_position - query_position, i.e. the distance in tokens from the attending position to the attended-to position. If bidirectional=False, then positive relative positions are invalid. We use smaller buckets for small absolute relative_position and larger buckets for larger absolute relative_positions. All relative positions >=max_distance map to the same bucket. All relative positions <=-max_distance map to the same bucket. 
This should allow for more graceful generalization to longer sequences than the model has been trained on Args: relative_position: an int32 Tensor bidirectional: a boolean - whether the attention is bidirectional num_buckets: an integer max_distance: an integer Returns: a Tensor with the same shape as relative_position, containing int32 values in the range [0, num_buckets) """ relative_buckets = 0 if bidirectional: num_buckets //= 2 relative_buckets += (relative_position > 0).to(torch.long) * num_buckets relative_position = torch.abs(relative_position) else: relative_position = -torch.min(relative_position, torch.zeros_like(relative_position)) # now relative_position is in the range [0, inf) # half of the buckets are for exact increments in positions max_exact = num_buckets // 2 is_small = relative_position < max_exact # The other half of the buckets are for logarithmically bigger bins in positions up to max_distance relative_position_if_large = max_exact + ( torch.log(relative_position.float() / max_exact) / math.log(max_distance / max_exact) * (num_buckets - max_exact) ).to(torch.long) relative_position_if_large = torch.min( relative_position_if_large, torch.full_like(relative_position_if_large, num_buckets - 1) ) relative_buckets += torch.where(is_small, relative_position, relative_position_if_large) return relative_buckets def compute_bias(self, query_length, key_length, device=None): """Compute binned relative position bias""" if device is None: device = self.relative_attention_bias.weight.device context_position = torch.arange(query_length, dtype=torch.long, device=device)[:, None] memory_position = torch.arange(key_length, dtype=torch.long, device=device)[None, :] relative_position = memory_position - context_position # shape (query_length, key_length) relative_position_bucket = self._relative_position_bucket( relative_position, # shape (query_length, key_length) bidirectional=(not self.is_decoder), num_buckets=self.relative_attention_num_buckets, 
max_distance=self.relative_attention_max_distance, ) values = self.relative_attention_bias(relative_position_bucket) # shape (query_length, key_length, num_heads) values = values.permute([2, 0, 1]).unsqueeze(0) # shape (1, num_heads, query_length, key_length) return values def forward( self, hidden_states, mask=None, key_value_states=None, position_bias=None, past_key_value=None, layer_head_mask=None, query_length=None, use_cache=False, output_attentions=False, ): """ Self-attention (if key_value_states is None) or attention over source sentence (provided by key_value_states). """ # Input is (batch_size, seq_length, dim) # Mask is (batch_size, key_length) (non-causal) or (batch_size, key_length, key_length) # past_key_value[0] is (batch_size, n_heads, q_len - 1, dim_per_head) batch_size, seq_length = hidden_states.shape[:2] real_seq_length = seq_length if past_key_value is not None: assert ( len(past_key_value) == 2 ), f"past_key_value should have 2 past states: keys and values. Got { len(past_key_value)} past states" real_seq_length += past_key_value[0].shape[2] if query_length is None else query_length key_length = real_seq_length if key_value_states is None else key_value_states.shape[1] def shape(states): """projection""" return states.view(batch_size, -1, self.n_heads, self.key_value_proj_dim).transpose(1, 2) def unshape(states): """reshape""" return states.transpose(1, 2).contiguous().view(batch_size, -1, self.inner_dim) def project(hidden_states, proj_layer, key_value_states, past_key_value): """projects hidden states correctly to key/query states""" if key_value_states is None: # self-attn # (batch_size, n_heads, seq_length, dim_per_head) hidden_states = shape(proj_layer(hidden_states)) elif past_key_value is None: # cross-attn # (batch_size, n_heads, seq_length, dim_per_head) hidden_states = shape(proj_layer(key_value_states)) if past_key_value is not None: if key_value_states is None: # self-attn # (batch_size, n_heads, key_length, dim_per_head) 
hidden_states = torch.cat([past_key_value, hidden_states], dim=2) elif past_key_value.shape[2] != key_value_states.shape[1]: # checking that the `sequence_length` of the `past_key_value` is the same as # the provided `key_value_states` to support prefix tuning # cross-attn # (batch_size, n_heads, seq_length, dim_per_head) hidden_states = shape(proj_layer(key_value_states)) else: # cross-attn hidden_states = past_key_value return hidden_states # get query states query_states = shape(self.q(hidden_states)) # (batch_size, n_heads, seq_length, dim_per_head) # get key/value states key_states = project( hidden_states, self.k, key_value_states, past_key_value[0] if past_key_value is not None else None ) value_states = project( hidden_states, self.v, key_value_states, past_key_value[1] if past_key_value is not None else None ) # compute scores scores = torch.matmul( query_states, key_states.transpose(3, 2) ) # equivalent of torch.einsum("bnqd,bnkd->bnqk", query_states, key_states), compatible with onnx op>9 if position_bias is None: if not self.has_relative_attention_bias: position_bias = torch.zeros( (1, self.n_heads, real_seq_length, key_length), device=scores.device, dtype=scores.dtype ) if self.gradient_checkpointing and self.training: position_bias.requires_grad = True else: position_bias = self.compute_bias(real_seq_length, key_length, device=scores.device) # if key and values are already calculated # we want only the last query position bias if past_key_value is not None: position_bias = position_bias[:, :, -hidden_states.size(1) :, :] if mask is not None: position_bias = position_bias + mask # (batch_size, n_heads, seq_length, key_length) if self.pruned_heads: mask = torch.ones(position_bias.shape[1]) mask[list(self.pruned_heads)] = 0 position_bias_masked = position_bias[:, mask.bool()] else: position_bias_masked = position_bias scores += position_bias_masked attn_weights = nn.functional.softmax(scores.float(), dim=-1).type_as( scores ) # (batch_size, n_heads, 
seq_length, key_length) attn_weights = nn.functional.dropout( attn_weights, p=self.dropout, training=self.training ) # (batch_size, n_heads, seq_length, key_length) # Mask heads if we want to if layer_head_mask is not None: attn_weights = attn_weights * layer_head_mask attn_output = unshape(torch.matmul(attn_weights, value_states)) # (batch_size, seq_length, dim) attn_output = self.o(attn_output) present_key_value_state = (key_states, value_states) if (self.is_decoder and use_cache) else None outputs = (attn_output,) + (present_key_value_state,) + (position_bias,) if output_attentions: outputs = outputs + (attn_weights,) return outputs # Copied from transformers.models.t5.modeling_t5.T5LayerSelfAttention with T5->MT5 class MT5LayerSelfAttention(nn.Module): def __init__(self, config, has_relative_attention_bias=False): super().__init__() self.SelfAttention = MT5Attention(config, has_relative_attention_bias=has_relative_attention_bias) self.layer_norm = MT5LayerNorm(config.d_model, eps=config.layer_norm_epsilon) self.dropout = nn.Dropout(config.dropout_rate) def forward( self, hidden_states, attention_mask=None, position_bias=None, layer_head_mask=None, past_key_value=None, use_cache=False, output_attentions=False, ): normed_hidden_states = self.layer_norm(hidden_states) attention_output = self.SelfAttention( normed_hidden_states, mask=attention_mask, position_bias=position_bias, layer_head_mask=layer_head_mask, past_key_value=past_key_value, use_cache=use_cache, output_attentions=output_attentions, ) hidden_states = hidden_states + self.dropout(attention_output[0]) outputs = (hidden_states,) + attention_output[1:] # add attentions if we output them return outputs # Copied from transformers.models.t5.modeling_t5.T5LayerCrossAttention with T5->MT5 class MT5LayerCrossAttention(nn.Module): def __init__(self, config): super().__init__() self.EncDecAttention = MT5Attention(config, has_relative_attention_bias=False) self.layer_norm = MT5LayerNorm(config.d_model, 
eps=config.layer_norm_epsilon) self.dropout = nn.Dropout(config.dropout_rate) def forward( self, hidden_states, key_value_states, attention_mask=None, position_bias=None, layer_head_mask=None, past_key_value=None, use_cache=False, query_length=None, output_attentions=False, ): normed_hidden_states = self.layer_norm(hidden_states) attention_output = self.EncDecAttention( normed_hidden_states, mask=attention_mask, key_value_states=key_value_states, position_bias=position_bias, layer_head_mask=layer_head_mask, past_key_value=past_key_value, use_cache=use_cache, query_length=query_length, output_attentions=output_attentions, ) layer_output = hidden_states + self.dropout(attention_output[0]) outputs = (layer_output,) + attention_output[1:] # add attentions if we output them return outputs # Copied from transformers.models.t5.modeling_t5.T5Block with T5->MT5 class MT5Block(nn.Module): def __init__(self, config, has_relative_attention_bias=False): super().__init__() self.is_decoder = config.is_decoder self.layer = nn.ModuleList() self.layer.append(MT5LayerSelfAttention(config, has_relative_attention_bias=has_relative_attention_bias)) if self.is_decoder: self.layer.append(MT5LayerCrossAttention(config)) self.layer.append(MT5LayerFF(config)) def forward( self, hidden_states, attention_mask=None, position_bias=None, encoder_hidden_states=None, encoder_attention_mask=None, encoder_decoder_position_bias=None, layer_head_mask=None, cross_attn_layer_head_mask=None, past_key_value=None, use_cache=False, output_attentions=False, return_dict=True, ): if past_key_value is not None: if not self.is_decoder: logger.warning("`past_key_values` is passed to the encoder. Please make sure this is intended.") expected_num_past_key_values = 2 if encoder_hidden_states is None else 4 if len(past_key_value) != expected_num_past_key_values: raise ValueError( f"There should be {expected_num_past_key_values} past states. " f"{'2 (past / key) for cross attention. 
' if expected_num_past_key_values == 4 else ''}" f"Got {len(past_key_value)} past key / value states" ) self_attn_past_key_value = past_key_value[:2] cross_attn_past_key_value = past_key_value[2:] else: self_attn_past_key_value, cross_attn_past_key_value = None, None self_attention_outputs = self.layer[0]( hidden_states, attention_mask=attention_mask, position_bias=position_bias, layer_head_mask=layer_head_mask, past_key_value=self_attn_past_key_value, use_cache=use_cache, output_attentions=output_attentions, ) hidden_states, present_key_value_state = self_attention_outputs[:2] attention_outputs = self_attention_outputs[2:] # Keep self-attention outputs and relative position weights # clamp inf values to enable fp16 training if hidden_states.dtype == torch.float16 and torch.isinf(hidden_states).any(): clamp_value = torch.finfo(hidden_states.dtype).max - 1000 hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value) do_cross_attention = self.is_decoder and encoder_hidden_states is not None if do_cross_attention: # the actual query length is unknown for cross attention # if using past key value states. 
Need to inject it here if present_key_value_state is not None: query_length = present_key_value_state[0].shape[2] else: query_length = None cross_attention_outputs = self.layer[1]( hidden_states, key_value_states=encoder_hidden_states, attention_mask=encoder_attention_mask, position_bias=encoder_decoder_position_bias, layer_head_mask=cross_attn_layer_head_mask, past_key_value=cross_attn_past_key_value, query_length=query_length, use_cache=use_cache, output_attentions=output_attentions, ) hidden_states = cross_attention_outputs[0] # clamp inf values to enable fp16 training if hidden_states.dtype == torch.float16 and torch.isinf(hidden_states).any(): clamp_value = torch.finfo(hidden_states.dtype).max - 1000 hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value) # Combine self attn and cross attn key value states if present_key_value_state is not None: present_key_value_state = present_key_value_state + cross_attention_outputs[1] # Keep cross-attention outputs and relative position weights attention_outputs = attention_outputs + cross_attention_outputs[2:] # Apply Feed Forward layer hidden_states = self.layer[-1](hidden_states) # clamp inf values to enable fp16 training if hidden_states.dtype == torch.float16 and torch.isinf(hidden_states).any(): clamp_value = torch.finfo(hidden_states.dtype).max - 1000 hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value) outputs = (hidden_states,) if use_cache: outputs = outputs + (present_key_value_state,) + attention_outputs else: outputs = outputs + attention_outputs return outputs # hidden-states, present_key_value_states, (self-attention position bias), (self-attention weights), (cross-attention position bias), (cross-attention weights) def load_tf_weights_in_mt5(model, config, tf_checkpoint_path): """Load tf checkpoints in a pytorch model.""" try: import re import numpy as np import tensorflow as tf except ImportError: logger.error( "Loading a TensorFlow model in PyTorch, requires 
TensorFlow to be installed. Please see " "https://www.tensorflow.org/install/ for installation instructions." ) raise tf_path = os.path.abspath(tf_checkpoint_path) logger.info(f"Converting TensorFlow checkpoint from {tf_path}") # Load weights from TF model init_vars = tf.train.list_variables(tf_path) names = [] tf_weights = {} for name, shape in init_vars: logger.info(f"Loading TF weight {name} with shape {shape}") array = tf.train.load_variable(tf_path, name) names.append(name) tf_weights[name] = array for txt_name in names: name = txt_name.split("/") # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v # which are not required for using pretrained model if any( n in ["adam_v", "adam_m", "AdamWeightDecayOptimizer", "AdamWeightDecayOptimizer_1", "global_step"] for n in name ): logger.info(f"Skipping {'/'.join(name)}") tf_weights.pop(txt_name, None) continue if "_slot_" in name[-1]: logger.info(f"Skipping {'/'.join(name)}") tf_weights.pop(txt_name, None) continue pointer = model array = tf_weights[txt_name] for m_name in name: if re.fullmatch(r"[A-Za-z]+_\d+", m_name): scope_names = re.split(r"_(\d+)", m_name) else: scope_names = [m_name] if scope_names[0] in ["kernel", "scale", "embedding"]: pointer = getattr(pointer, "weight") elif scope_names[0] == "self_attention": pointer = getattr(pointer, "layer") pointer = pointer[0] elif scope_names[0] == "enc_dec_attention": pointer = getattr(pointer, "layer") pointer = pointer[1] elif scope_names[0] == "dense_relu_dense": pointer = getattr(pointer, "layer") pointer = pointer[2] elif scope_names[0] == "rms_norm": if hasattr(pointer, "layer_norm"): pointer = getattr(pointer, "layer_norm") elif hasattr(pointer, "final_layer_norm"): pointer = getattr(pointer, "final_layer_norm") elif scope_names[0] == "scale": pointer = getattr(pointer, "weight") elif scope_names[0] == "output_bias" or scope_names[0] == "beta": pointer = getattr(pointer, "bias") elif scope_names[0] == "squad": pointer = 
getattr(pointer, "classifier") elif scope_names[0] == "decoder" and name[1] == "logits": continue elif scope_names[0] == "logits": pointer = getattr(pointer, "lm_head") elif scope_names[0] == "wi" and len(scope_names) > 1 and scope_names[1].isdigit(): pointer = getattr(pointer, f"wi_{scope_names[1]}") continue else: try: pointer = getattr(pointer, scope_names[0]) except AttributeError: logger.info(f"Skipping {'/'.join(name)}") continue if len(scope_names) >= 2: num = int(scope_names[1]) pointer = pointer[num] if scope_names[0] not in ["kernel", "scale", "embedding"]: pointer = getattr(pointer, "weight") if scope_names[0] != "embedding": logger.info(f"Transposing numpy weight of shape {array.shape} for {name}") array = np.transpose(array) try: assert ( pointer.shape == array.shape ), f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched" except AssertionError as e: e.args += (pointer.shape, array.shape) raise logger.info(f"Initialize PyTorch weight {name}") pointer.data = torch.from_numpy(array.astype(np.float32)) tf_weights.pop(txt_name, None) logger.info(f"Weights not copied to PyTorch model: {', '.join(tf_weights.keys())}.") return model # Copied from transformers.models.t5.modeling_t5.T5PreTrainedModel with T5->MT5, t5->mt5 class MT5PreTrainedModel(PreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. 
""" config_class = MT5Config load_tf_weights = load_tf_weights_in_mt5 base_model_prefix = "transformer" is_parallelizable = True supports_gradient_checkpointing = True _no_split_modules = ["MT5Block"] _keep_in_fp32_modules = ["wo"] @property def dummy_inputs(self): input_ids = torch.tensor(DUMMY_INPUTS) input_mask = torch.tensor(DUMMY_MASK) dummy_inputs = { "decoder_input_ids": input_ids, "input_ids": input_ids, "decoder_attention_mask": input_mask, } return dummy_inputs def _init_weights(self, module): """Initialize the weights""" factor = self.config.initializer_factor # Used for testing weights initialization if isinstance(module, MT5LayerNorm): module.weight.data.fill_(factor * 1.0) elif isinstance(module, (MT5Model, MT5ForConditionalGeneration, MT5EncoderModel)): # Mesh TensorFlow embeddings initialization # See https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/layers.py#L1624 module.shared.weight.data.normal_(mean=0.0, std=factor * 1.0) if hasattr(module, "lm_head") and not self.config.tie_word_embeddings: module.lm_head.weight.data.normal_(mean=0.0, std=factor * 1.0) elif isinstance(module, MT5DenseActDense): # Mesh TensorFlow FF initialization # See https://github.com/tensorflow/mesh/blob/master/mesh_tensorflow/transformer/transformer_layers.py#L56 # and https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/layers.py#L89 module.wi.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_model) ** -0.5)) if hasattr(module.wi, "bias") and module.wi.bias is not None: module.wi.bias.data.zero_() module.wo.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_ff) ** -0.5)) if hasattr(module.wo, "bias") and module.wo.bias is not None: module.wo.bias.data.zero_() elif isinstance(module, MT5DenseGatedActDense): module.wi_0.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_model) ** -0.5)) if hasattr(module.wi_0, "bias") and module.wi_0.bias is not None: 
module.wi_0.bias.data.zero_() module.wi_1.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_model) ** -0.5)) if hasattr(module.wi_1, "bias") and module.wi_1.bias is not None: module.wi_1.bias.data.zero_() module.wo.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_ff) ** -0.5)) if hasattr(module.wo, "bias") and module.wo.bias is not None: module.wo.bias.data.zero_() elif isinstance(module, MT5Attention): # Mesh TensorFlow attention initialization to avoid scaling before softmax # See https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/transformer/attention.py#L136 d_model = self.config.d_model key_value_proj_dim = self.config.d_kv n_heads = self.config.num_heads module.q.weight.data.normal_(mean=0.0, std=factor * ((d_model * key_value_proj_dim) ** -0.5)) module.k.weight.data.normal_(mean=0.0, std=factor * (d_model**-0.5)) module.v.weight.data.normal_(mean=0.0, std=factor * (d_model**-0.5)) module.o.weight.data.normal_(mean=0.0, std=factor * ((n_heads * key_value_proj_dim) ** -0.5)) if module.has_relative_attention_bias: module.relative_attention_bias.weight.data.normal_(mean=0.0, std=factor * ((d_model) ** -0.5)) def _set_gradient_checkpointing(self, module, value=False): if isinstance(module, (MT5Attention, MT5Stack)): module.gradient_checkpointing = value def _shift_right(self, input_ids): decoder_start_token_id = self.config.decoder_start_token_id pad_token_id = self.config.pad_token_id assert decoder_start_token_id is not None, ( "self.model.config.decoder_start_token_id has to be defined. In MT5 it is usually set to the pad_token_id." " See MT5 docs for more information" ) # shift inputs to the right if is_torch_fx_proxy(input_ids): # Item assignment is not supported natively for proxies. 
shifted_input_ids = torch.full(input_ids.shape[:-1] + (1,), decoder_start_token_id) shifted_input_ids = torch.cat([shifted_input_ids, input_ids[..., :-1]], dim=-1) else: shifted_input_ids = input_ids.new_zeros(input_ids.shape) shifted_input_ids[..., 1:] = input_ids[..., :-1].clone() shifted_input_ids[..., 0] = decoder_start_token_id assert pad_token_id is not None, "self.model.config.pad_token_id has to be defined." # replace possible -100 values in labels by `pad_token_id` shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id) return shifted_input_ids # Copied from transformers.models.t5.modeling_t5.T5Stack with T5->MT5 class MT5Stack(MT5PreTrainedModel): def __init__(self, config, embed_tokens=None): super().__init__(config) self.embed_tokens = embed_tokens self.is_decoder = config.is_decoder self.block = nn.ModuleList( [MT5Block(config, has_relative_attention_bias=bool(i == 0)) for i in range(config.num_layers)] ) self.final_layer_norm = MT5LayerNorm(config.d_model, eps=config.layer_norm_epsilon) self.dropout = nn.Dropout(config.dropout_rate) # Initialize weights and apply final processing self.post_init() # Model parallel self.model_parallel = False self.device_map = None self.gradient_checkpointing = False @add_start_docstrings(PARALLELIZE_DOCSTRING) def parallelize(self, device_map=None): warnings.warn( "`MT5Stack.parallelize` is deprecated and will be removed in v5 of Transformers, you should load your model" " with `device_map='balanced'` in the call to `from_pretrained`. 
You can also provide your own" " `device_map` but it needs to be a dictionary module_name to device, so for instance {'block.0': 0," " 'block.1': 1, ...}", FutureWarning, ) # Check validity of device_map self.device_map = ( get_device_map(len(self.block), range(torch.cuda.device_count())) if device_map is None else device_map ) assert_device_map(self.device_map, len(self.block)) self.model_parallel = True self.first_device = "cpu" if "cpu" in self.device_map.keys() else "cuda:" + str(min(self.device_map.keys())) self.last_device = "cuda:" + str(max(self.device_map.keys())) # Load onto devices for k, v in self.device_map.items(): for layer in v: cuda_device = "cuda:" + str(k) self.block[layer] = self.block[layer].to(cuda_device) # Set embed_tokens to first layer self.embed_tokens = self.embed_tokens.to(self.first_device) # Set final layer norm to last device self.final_layer_norm = self.final_layer_norm.to(self.last_device) @add_start_docstrings(DEPARALLELIZE_DOCSTRING) def deparallelize(self): warnings.warn( "Like `parallelize`, `deparallelize` is deprecated and will be removed in v5 of Transformers.", FutureWarning, ) self.model_parallel = False self.device_map = None self.first_device = "cpu" self.last_device = "cpu" for i in range(len(self.block)): self.block[i] = self.block[i].to("cpu") self.embed_tokens = self.embed_tokens.to("cpu") self.final_layer_norm = self.final_layer_norm.to("cpu") torch.cuda.empty_cache() def get_input_embeddings(self): return self.embed_tokens def set_input_embeddings(self, new_embeddings): self.embed_tokens = new_embeddings def forward( self, input_ids=None, attention_mask=None, encoder_hidden_states=None, encoder_attention_mask=None, inputs_embeds=None, head_mask=None, cross_attn_head_mask=None, past_key_values=None, use_cache=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): # Model parallel if self.model_parallel: torch.cuda.set_device(self.first_device) self.embed_tokens = 
self.embed_tokens.to(self.first_device) use_cache = use_cache if use_cache is not None else self.config.use_cache output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict if input_ids is not None and inputs_embeds is not None: err_msg_prefix = "decoder_" if self.is_decoder else "" raise ValueError( f"You cannot specify both {err_msg_prefix}input_ids and {err_msg_prefix}inputs_embeds at the same time" ) elif input_ids is not None: input_shape = input_ids.size() input_ids = input_ids.view(-1, input_shape[-1]) elif inputs_embeds is not None: input_shape = inputs_embeds.size()[:-1] else: err_msg_prefix = "decoder_" if self.is_decoder else "" raise ValueError(f"You have to specify either {err_msg_prefix}input_ids or {err_msg_prefix}inputs_embeds") if inputs_embeds is None: assert self.embed_tokens is not None, "You have to initialize the model with valid token embeddings" inputs_embeds = self.embed_tokens(input_ids) batch_size, seq_length = input_shape # required mask seq length can be calculated via length of past mask_seq_length = past_key_values[0][0].shape[2] + seq_length if past_key_values is not None else seq_length if use_cache is True: assert self.is_decoder, f"`use_cache` can only be set to `True` if {self} is used as a decoder" if attention_mask is None: attention_mask = torch.ones(batch_size, mask_seq_length, device=inputs_embeds.device) if self.is_decoder and encoder_attention_mask is None and encoder_hidden_states is not None: encoder_seq_length = encoder_hidden_states.shape[1] encoder_attention_mask = torch.ones( batch_size, encoder_seq_length, device=inputs_embeds.device, dtype=torch.long ) # initialize past_key_values with `None` if past does not exist if past_key_values is None: past_key_values = 
[None] * len(self.block) # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] # ourselves in which case we just need to make it broadcastable to all heads. extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape) # If a 2D or 3D attention mask is provided for the cross-attention # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length] if self.is_decoder and encoder_hidden_states is not None: encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size() encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length) if encoder_attention_mask is None: encoder_attention_mask = torch.ones(encoder_hidden_shape, device=inputs_embeds.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) else: encoder_extended_attention_mask = None # Prepare head mask if needed head_mask = self.get_head_mask(head_mask, self.config.num_layers) cross_attn_head_mask = self.get_head_mask(cross_attn_head_mask, self.config.num_layers) present_key_value_states = () if use_cache else None all_hidden_states = () if output_hidden_states else None all_attentions = () if output_attentions else None all_cross_attentions = () if (output_attentions and self.is_decoder) else None position_bias = None encoder_decoder_position_bias = None hidden_states = self.dropout(inputs_embeds) for i, (layer_module, past_key_value) in enumerate(zip(self.block, past_key_values)): layer_head_mask = head_mask[i] cross_attn_layer_head_mask = cross_attn_head_mask[i] # Model parallel if self.model_parallel: torch.cuda.set_device(hidden_states.device) # Ensure that attention_mask is always on the same device as hidden_states if attention_mask is not None: attention_mask = attention_mask.to(hidden_states.device) if position_bias is not None: position_bias = position_bias.to(hidden_states.device) if encoder_hidden_states is not None: encoder_hidden_states = 
encoder_hidden_states.to(hidden_states.device) if encoder_extended_attention_mask is not None: encoder_extended_attention_mask = encoder_extended_attention_mask.to(hidden_states.device) if encoder_decoder_position_bias is not None: encoder_decoder_position_bias = encoder_decoder_position_bias.to(hidden_states.device) if layer_head_mask is not None: layer_head_mask = layer_head_mask.to(hidden_states.device) if cross_attn_layer_head_mask is not None: cross_attn_layer_head_mask = cross_attn_layer_head_mask.to(hidden_states.device) if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if self.gradient_checkpointing and self.training: if use_cache: logger.warning_once( "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." ) use_cache = False def create_custom_forward(module): def custom_forward(*inputs): return tuple(module(*inputs, use_cache, output_attentions)) return custom_forward layer_outputs = checkpoint( create_custom_forward(layer_module), hidden_states, extended_attention_mask, position_bias, encoder_hidden_states, encoder_extended_attention_mask, encoder_decoder_position_bias, layer_head_mask, cross_attn_layer_head_mask, None, # past_key_value is always None with gradient checkpointing ) else: layer_outputs = layer_module( hidden_states, attention_mask=extended_attention_mask, position_bias=position_bias, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, encoder_decoder_position_bias=encoder_decoder_position_bias, layer_head_mask=layer_head_mask, cross_attn_layer_head_mask=cross_attn_layer_head_mask, past_key_value=past_key_value, use_cache=use_cache, output_attentions=output_attentions, ) # layer_outputs is a tuple with: # hidden-states, key-value-states, (self-attention position bias), (self-attention weights), (cross-attention position bias), (cross-attention weights) if use_cache is False: layer_outputs = layer_outputs[:1] + (None,) + 
layer_outputs[1:] hidden_states, present_key_value_state = layer_outputs[:2] # We share the position biases between the layers - the first layer store them # layer_outputs = hidden-states, key-value-states (self-attention position bias), (self-attention weights), # (cross-attention position bias), (cross-attention weights) position_bias = layer_outputs[2] if self.is_decoder and encoder_hidden_states is not None: encoder_decoder_position_bias = layer_outputs[4 if output_attentions else 3] # append next layer key value states if use_cache: present_key_value_states = present_key_value_states + (present_key_value_state,) if output_attentions: all_attentions = all_attentions + (layer_outputs[3],) if self.is_decoder: all_cross_attentions = all_cross_attentions + (layer_outputs[5],) # Model Parallel: If it's the last layer for that device, put things on the next device if self.model_parallel: for k, v in self.device_map.items(): if i == v[-1] and "cuda:" + str(k) != self.last_device: hidden_states = hidden_states.to("cuda:" + str(k + 1)) hidden_states = self.final_layer_norm(hidden_states) hidden_states = self.dropout(hidden_states) # Add last layer if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not return_dict: return tuple( v for v in [ hidden_states, present_key_value_states, all_hidden_states, all_attentions, all_cross_attentions, ] if v is not None ) return BaseModelOutputWithPastAndCrossAttentions( last_hidden_state=hidden_states, past_key_values=present_key_value_states, hidden_states=all_hidden_states, attentions=all_attentions, cross_attentions=all_cross_attentions, ) MT5_START_DOCSTRING = r""" The MT5 model was proposed in [Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer](https://arxiv.org/abs/1910.10683) by Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan Narang, Michael Matena, Yanqi Zhou, Wei Li, Peter J. Liu. 
It's an encoder decoder transformer pre-trained in a text-to-text denoising generative setting. This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`MT5Config`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ MT5_INPUTS_DOCSTRING = r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. MT5 is a model with relative position embeddings so you should be able to pad the inputs on both the right and the left. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for detail. [What are input IDs?](../glossary#input-ids) To know more on how to prepare `input_ids` for pretraining take a look a [MT5 Training](./mt5#training). attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) decoder_input_ids (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*): Indices of decoder input sequence tokens in the vocabulary. Indices can be obtained using [`AutoTokenizer`]. 
See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are decoder input IDs?](../glossary#decoder-input-ids) MT5 uses the `pad_token_id` as the starting token for `decoder_input_ids` generation. If `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see `past_key_values`). To know more on how to prepare `decoder_input_ids` for pretraining take a look at [MT5 Training](./mt5#training). decoder_attention_mask (`torch.BoolTensor` of shape `(batch_size, target_sequence_length)`, *optional*): Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also be used by default. head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules in the encoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. decoder_head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules in the decoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. cross_attn_head_mask (`torch.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the cross-attention modules in the decoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. encoder_outputs (`tuple(tuple(torch.FloatTensor)`, *optional*): Tuple consists of (`last_hidden_state`, `optional`: *hidden_states*, `optional`: *attentions*) `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)` is a sequence of hidden states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. 
past_key_values (`tuple(tuple(torch.FloatTensor))` of length `config.n_layers` with each tuple having 4 tensors of shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. decoder_inputs_embeds (`torch.FloatTensor` of shape `(batch_size, target_sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `decoder_input_ids` you can choose to directly pass an embedded representation. If `past_key_values` is used, optionally only the last `decoder_inputs_embeds` have to be input (see `past_key_values`). This is useful if you want more control over how to convert `decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix. If `decoder_input_ids` and `decoder_inputs_embeds` are both unset, `decoder_inputs_embeds` takes the value of `inputs_embeds`. use_cache (`bool`, *optional*): If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see `past_key_values`). output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. 
output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ MT5_ENCODER_INPUTS_DOCSTRING = r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. MT5 is a model with relative position embeddings so you should be able to pad the inputs on both the right and the left. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for detail. To know more on how to prepare `input_ids` for pretraining take a look a [MT5 Training](./mt5#training). attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. 
    output_hidden_states (`bool`, *optional*):
        Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
        more detail.
    return_dict (`bool`, *optional*):
        Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""

# Warning message for FutureWarning: head_mask was separated into two input args - head_mask, decoder_head_mask
__HEAD_MASK_WARNING_MSG = """
The input argument `head_mask` was split into two arguments `head_mask` and `decoder_head_mask`. Currently,
`decoder_head_mask` is set to copy `head_mask`, but this feature is deprecated and will be removed in future versions.
If you do not want to use any `decoder_head_mask` now, please set `decoder_head_mask = torch.ones(num_layers,
num_heads)`.
"""


@add_start_docstrings(
    "The bare MT5 Model transformer outputting raw hidden-states without any specific head on top.",
    MT5_START_DOCSTRING,
)
class MT5Model(MT5PreTrainedModel):
    r"""
    Examples:

    ```python
    >>> from transformers import MT5Model, AutoTokenizer

    >>> model = MT5Model.from_pretrained("google/mt5-small")
    >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")
    >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien."
    >>> summary = "Weiter Verhandlung in Syrien."
    >>> inputs = tokenizer(article, return_tensors="pt")
    >>> labels = tokenizer(text_target=summary, return_tensors="pt")

    >>> outputs = model(input_ids=inputs["input_ids"], decoder_input_ids=labels["input_ids"])
    >>> hidden_states = outputs.last_hidden_state
    ```"""

    model_type = "mt5"
    config_class = MT5Config
    _keys_to_ignore_on_load_missing = [
        r"encoder.embed_tokens.weight",
        r"decoder.embed_tokens.weight",
        r"decoder.block.0.layer.1.EncDecAttention.relative_attention_bias.weight",
    ]
    _keys_to_ignore_on_save = [
        r"encoder.embed_tokens.weight",
        r"decoder.embed_tokens.weight",
    ]
    _keys_to_ignore_on_load_unexpected = [
        r"decoder.block.0.layer.1.EncDecAttention.relative_attention_bias.weight",
    ]

    # Copied from transformers.models.t5.modeling_t5.T5Model.__init__ with T5->MT5
    def __init__(self, config: MT5Config):
        super().__init__(config)
        # Token embedding matrix shared by encoder and decoder.
        self.shared = nn.Embedding(config.vocab_size, config.d_model)

        # Encoder gets a non-decoder, non-caching copy of the config.
        encoder_config = copy.deepcopy(config)
        encoder_config.is_decoder = False
        encoder_config.use_cache = False
        encoder_config.is_encoder_decoder = False
        self.encoder = MT5Stack(encoder_config, self.shared)

        # Decoder may have a different number of layers (config.num_decoder_layers).
        decoder_config = copy.deepcopy(config)
        decoder_config.is_decoder = True
        decoder_config.is_encoder_decoder = False
        decoder_config.num_layers = config.num_decoder_layers
        self.decoder = MT5Stack(decoder_config, self.shared)

        # Initialize weights and apply final processing
        self.post_init()

        # Model parallel
        self.model_parallel = False
        self.device_map = None

    @add_start_docstrings(PARALLELIZE_DOCSTRING)
    # Copied from transformers.models.t5.modeling_t5.T5Model.parallelize
    def parallelize(self, device_map=None):
        warnings.warn(
            "`T5Model.parallelize` is deprecated and will be removed in v5 of Transformers, you should load your model"
            " with `device_map='balanced'` in the call to `from_pretrained`. You can also provide your own"
            " `device_map` but it needs to be a dictionary module_name to device, so for instance {'encoder.block.0':"
            " 0, 'encoder.block.1': 1, ...}",
            FutureWarning,
        )
        # Default: spread encoder blocks evenly over all visible CUDA devices.
        self.device_map = (
            get_device_map(len(self.encoder.block), range(torch.cuda.device_count()))
            if device_map is None
            else device_map
        )
        assert_device_map(self.device_map, len(self.encoder.block))
        self.encoder.parallelize(self.device_map)
        self.decoder.parallelize(self.device_map)
        self.model_parallel = True

    @add_start_docstrings(DEPARALLELIZE_DOCSTRING)
    # Copied from transformers.models.t5.modeling_t5.T5Model.deparallelize
    def deparallelize(self):
        warnings.warn(
            "Like `parallelize`, `deparallelize` is deprecated and will be removed in v5 of Transformers.",
            FutureWarning,
        )
        self.encoder.deparallelize()
        self.decoder.deparallelize()
        self.encoder = self.encoder.to("cpu")
        self.decoder = self.decoder.to("cpu")
        self.model_parallel = False
        self.device_map = None
        torch.cuda.empty_cache()

    # Copied from transformers.models.t5.modeling_t5.T5Model.get_input_embeddings
    def get_input_embeddings(self):
        return self.shared

    # Copied from transformers.models.t5.modeling_t5.T5Model.set_input_embeddings
    def set_input_embeddings(self, new_embeddings):
        # Keep encoder and decoder pointing at the same (new) embedding matrix.
        self.shared = new_embeddings
        self.encoder.set_input_embeddings(new_embeddings)
        self.decoder.set_input_embeddings(new_embeddings)

    # Copied from transformers.models.t5.modeling_t5.T5Model.get_encoder
    def get_encoder(self):
        return self.encoder

    # Copied from transformers.models.t5.modeling_t5.T5Model.get_decoder
    def get_decoder(self):
        return self.decoder

    # Copied from transformers.models.t5.modeling_t5.T5Model._prune_heads
    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        # NOTE(review): this indexes `self.encoder.layer[...].attention`, while
        # MT5EncoderModel._prune_heads uses `self.encoder.block[...].layer[0].SelfAttention`.
        # Confirm which path matches MT5Stack's actual module structure.
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @add_start_docstrings_to_model_forward(MT5_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=Seq2SeqModelOutput, config_class=_CONFIG_FOR_DOC)
    # Copied from transformers.models.t5.modeling_t5.T5Model.forward with T5->MT5, t5->mt5
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        decoder_input_ids: Optional[torch.LongTensor] = None,
        decoder_attention_mask: Optional[torch.BoolTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        decoder_head_mask: Optional[torch.FloatTensor] = None,
        cross_attn_head_mask: Optional[torch.Tensor] = None,
        encoder_outputs: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
        past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        decoder_inputs_embeds: Optional[torch.Tensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple[torch.FloatTensor], Seq2SeqModelOutput]:
        r"""
        Returns:

        Example:

        ```python
        >>> from transformers import AutoTokenizer, MT5Model

        >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")
        >>> model = MT5Model.from_pretrained("google/mt5-small")

        >>> input_ids = tokenizer(
        ...     "Studies have been shown that owning a dog is good for you", return_tensors="pt"
        ... ).input_ids  # Batch size 1
        >>> decoder_input_ids = tokenizer("Studies show that", return_tensors="pt").input_ids  # Batch size 1

        >>> # preprocess: Prepend decoder_input_ids with start token which is pad token for MT5Model.
        >>> # This is not needed for torch's MT5ForConditionalGeneration as it does this internally using labels arg.
        >>> decoder_input_ids = model._shift_right(decoder_input_ids)

        >>> # forward pass
        >>> outputs = model(input_ids=input_ids, decoder_input_ids=decoder_input_ids)
        >>> last_hidden_states = outputs.last_hidden_state
        ```"""
        use_cache = use_cache if use_cache is not None else self.config.use_cache
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # FutureWarning: head_mask was separated into two input args - head_mask, decoder_head_mask
        if head_mask is not None and decoder_head_mask is None:
            if self.config.num_layers == self.config.num_decoder_layers:
                warnings.warn(__HEAD_MASK_WARNING_MSG, FutureWarning)
                decoder_head_mask = head_mask

        # Encode if needed (training, first prediction pass)
        if encoder_outputs is None:
            encoder_outputs = self.encoder(
                input_ids=input_ids,
                attention_mask=attention_mask,
                inputs_embeds=inputs_embeds,
                head_mask=head_mask,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                return_dict=return_dict,
            )
        elif return_dict and not isinstance(encoder_outputs, BaseModelOutput):
            # Wrap a legacy tuple into the dataclass the rest of this method expects.
            encoder_outputs = BaseModelOutput(
                last_hidden_state=encoder_outputs[0],
                hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,
                attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,
            )

        hidden_states = encoder_outputs[0]

        # Set device for model parallelism
        if self.model_parallel:
            torch.cuda.set_device(self.decoder.first_device)
            hidden_states = hidden_states.to(self.decoder.first_device)
            if decoder_input_ids is not None:
                decoder_input_ids = decoder_input_ids.to(self.decoder.first_device)
            if attention_mask is not None:
                attention_mask = attention_mask.to(self.decoder.first_device)
            if decoder_attention_mask is not None:
                decoder_attention_mask = decoder_attention_mask.to(self.decoder.first_device)

        # Decode
        decoder_outputs = self.decoder(
            input_ids=decoder_input_ids,
            attention_mask=decoder_attention_mask,
            inputs_embeds=decoder_inputs_embeds,
            past_key_values=past_key_values,
            encoder_hidden_states=hidden_states,
            encoder_attention_mask=attention_mask,
            head_mask=decoder_head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        if not return_dict:
            return decoder_outputs + encoder_outputs

        return Seq2SeqModelOutput(
            last_hidden_state=decoder_outputs.last_hidden_state,
            past_key_values=decoder_outputs.past_key_values,
            decoder_hidden_states=decoder_outputs.hidden_states,
            decoder_attentions=decoder_outputs.attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=encoder_outputs.last_hidden_state,
            encoder_hidden_states=encoder_outputs.hidden_states,
            encoder_attentions=encoder_outputs.attentions,
        )


@add_start_docstrings("""MT5 Model with a `language modeling` head on top.""", MT5_START_DOCSTRING)
class MT5ForConditionalGeneration(MT5PreTrainedModel):
    r"""
    Examples:

    ```python
    >>> from transformers import MT5ForConditionalGeneration, AutoTokenizer

    >>> model = MT5ForConditionalGeneration.from_pretrained("google/mt5-small")
    >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")
    >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien."
    >>> summary = "Weiter Verhandlung in Syrien."
    >>> inputs = tokenizer(article, text_target=summary, return_tensors="pt")

    >>> outputs = model(**inputs)
    >>> loss = outputs.loss
    ```"""

    model_type = "mt5"
    config_class = MT5Config
    _keys_to_ignore_on_load_missing = [
        r"encoder.embed_tokens.weight",
    ]
    _keys_to_ignore_on_save = [
        r"encoder.embed_tokens.weight",
    ]
    _keys_to_ignore_on_load_unexpected = [
        r"decoder.block.0.layer.1.EncDecAttention.relative_attention_bias.weight",
    ]

    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.__init__ with T5->MT5
    def __init__(self, config: MT5Config):
        super().__init__(config)
        # Kept for the tie_word_embeddings logit rescaling in forward().
        self.model_dim = config.d_model

        # Token embedding matrix shared by encoder and decoder.
        self.shared = nn.Embedding(config.vocab_size, config.d_model)

        encoder_config = copy.deepcopy(config)
        encoder_config.is_decoder = False
        encoder_config.use_cache = False
        encoder_config.is_encoder_decoder = False
        self.encoder = MT5Stack(encoder_config, self.shared)

        decoder_config = copy.deepcopy(config)
        decoder_config.is_decoder = True
        decoder_config.is_encoder_decoder = False
        decoder_config.num_layers = config.num_decoder_layers
        self.decoder = MT5Stack(decoder_config, self.shared)

        self.lm_head = nn.Linear(config.d_model, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

        # Model parallel
        self.model_parallel = False
        self.device_map = None

    @add_start_docstrings(PARALLELIZE_DOCSTRING)
    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.parallelize
    def parallelize(self, device_map=None):
        warnings.warn(
            "`T5ForConditionalGeneration.parallelize` is deprecated and will be removed in v5 of Transformers, you"
            " should load your model with `device_map='balanced'` in the call to `from_pretrained`. You can also"
            " provide your own `device_map` but it needs to be a dictionary module_name to device, so for instance"
            " {'encoder.block.0': 0, 'encoder.block.1': 1, ...}",
            FutureWarning,
        )
        self.device_map = (
            get_device_map(len(self.encoder.block), range(torch.cuda.device_count()))
            if device_map is None
            else device_map
        )
        assert_device_map(self.device_map, len(self.encoder.block))
        self.encoder.parallelize(self.device_map)
        self.decoder.parallelize(self.device_map)
        # LM head must live on the same device as the decoder output.
        self.lm_head = self.lm_head.to(self.decoder.first_device)
        self.model_parallel = True

    @add_start_docstrings(DEPARALLELIZE_DOCSTRING)
    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.deparallelize
    def deparallelize(self):
        warnings.warn(
            "Like `parallelize`, `deparallelize` is deprecated and will be removed in v5 of Transformers.",
            FutureWarning,
        )
        self.encoder.deparallelize()
        self.decoder.deparallelize()
        self.encoder = self.encoder.to("cpu")
        self.decoder = self.decoder.to("cpu")
        self.lm_head = self.lm_head.to("cpu")
        self.model_parallel = False
        self.device_map = None
        torch.cuda.empty_cache()

    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.get_input_embeddings
    def get_input_embeddings(self):
        return self.shared

    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.set_input_embeddings
    def set_input_embeddings(self, new_embeddings):
        self.shared = new_embeddings
        self.encoder.set_input_embeddings(new_embeddings)
        self.decoder.set_input_embeddings(new_embeddings)

    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.set_output_embeddings
    def set_output_embeddings(self, new_embeddings):
        self.lm_head = new_embeddings

    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.get_output_embeddings
    def get_output_embeddings(self):
        return self.lm_head

    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.get_encoder
    def get_encoder(self):
        return self.encoder

    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.get_decoder
    def get_decoder(self):
        return self.decoder

    @add_start_docstrings_to_model_forward(MT5_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=Seq2SeqLMOutput, config_class=_CONFIG_FOR_DOC)
    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.forward with T5->MT5, t5->mt5
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        decoder_input_ids: Optional[torch.LongTensor] = None,
        decoder_attention_mask: Optional[torch.BoolTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        decoder_head_mask: Optional[torch.FloatTensor] = None,
        cross_attn_head_mask: Optional[torch.Tensor] = None,
        encoder_outputs: Optional[Tuple[Tuple[torch.Tensor]]] = None,
        past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        decoder_inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple[torch.FloatTensor], Seq2SeqLMOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[-100, 0, ...,
            config.vocab_size - 1]`. All labels set to `-100` are ignored (masked), the loss is only computed for
            labels in `[0, ..., config.vocab_size]`

        Returns:

        Examples:

        ```python
        >>> from transformers import AutoTokenizer, MT5ForConditionalGeneration

        >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")
        >>> model = MT5ForConditionalGeneration.from_pretrained("google/mt5-small")

        >>> # training
        >>> input_ids = tokenizer("The <extra_id_0> walks in <extra_id_1> park", return_tensors="pt").input_ids
        >>> labels = tokenizer("<extra_id_0> cute dog <extra_id_1> the <extra_id_2>", return_tensors="pt").input_ids
        >>> outputs = model(input_ids=input_ids, labels=labels)
        >>> loss = outputs.loss
        >>> logits = outputs.logits

        >>> # inference
        >>> input_ids = tokenizer(
        ...     "summarize: studies have shown that owning a dog is good for you", return_tensors="pt"
        ... ).input_ids  # Batch size 1
        >>> outputs = model.generate(input_ids)
        >>> print(tokenizer.decode(outputs[0], skip_special_tokens=True))
        >>> # studies have shown that owning a dog is good for you.
        ```"""
        use_cache = use_cache if use_cache is not None else self.config.use_cache
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # FutureWarning: head_mask was separated into two input args - head_mask, decoder_head_mask
        if head_mask is not None and decoder_head_mask is None:
            if self.config.num_layers == self.config.num_decoder_layers:
                warnings.warn(__HEAD_MASK_WARNING_MSG, FutureWarning)
                decoder_head_mask = head_mask

        # Encode if needed (training, first prediction pass)
        if encoder_outputs is None:
            # Convert encoder inputs in embeddings if needed
            encoder_outputs = self.encoder(
                input_ids=input_ids,
                attention_mask=attention_mask,
                inputs_embeds=inputs_embeds,
                head_mask=head_mask,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                return_dict=return_dict,
            )
        elif return_dict and not isinstance(encoder_outputs, BaseModelOutput):
            encoder_outputs = BaseModelOutput(
                last_hidden_state=encoder_outputs[0],
                hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,
                attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,
            )

        hidden_states = encoder_outputs[0]

        # NOTE(review): the same device switch is repeated in the block below; this early one only
        # guards the label shift that follows.
        if self.model_parallel:
            torch.cuda.set_device(self.decoder.first_device)

        if labels is not None and decoder_input_ids is None and decoder_inputs_embeds is None:
            # get decoder inputs from shifting lm labels to the right
            decoder_input_ids = self._shift_right(labels)

        # Set device for model parallelism
        if self.model_parallel:
            torch.cuda.set_device(self.decoder.first_device)
            hidden_states = hidden_states.to(self.decoder.first_device)
            if decoder_input_ids is not None:
                decoder_input_ids = decoder_input_ids.to(self.decoder.first_device)
            if attention_mask is not None:
                attention_mask = attention_mask.to(self.decoder.first_device)
            if decoder_attention_mask is not None:
                decoder_attention_mask = decoder_attention_mask.to(self.decoder.first_device)

        # Decode
        decoder_outputs = self.decoder(
            input_ids=decoder_input_ids,
            attention_mask=decoder_attention_mask,
            inputs_embeds=decoder_inputs_embeds,
            past_key_values=past_key_values,
            encoder_hidden_states=hidden_states,
            encoder_attention_mask=attention_mask,
            head_mask=decoder_head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = decoder_outputs[0]

        # Set device for model parallelism
        if self.model_parallel:
            torch.cuda.set_device(self.encoder.first_device)
            self.lm_head = self.lm_head.to(self.encoder.first_device)
            sequence_output = sequence_output.to(self.lm_head.weight.device)

        if self.config.tie_word_embeddings:
            # Rescale output before projecting on vocab
            # See https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/transformer/transformer.py#L586
            sequence_output = sequence_output * (self.model_dim**-0.5)

        lm_logits = self.lm_head(sequence_output)

        loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss(ignore_index=-100)
            loss = loss_fct(lm_logits.view(-1, lm_logits.size(-1)), labels.view(-1))
            # TODO(thom): Add z_loss https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/layers.py#L666

        if not return_dict:
            output = (lm_logits,) + decoder_outputs[1:] + encoder_outputs
            return ((loss,) + output) if loss is not None else output

        return Seq2SeqLMOutput(
            loss=loss,
            logits=lm_logits,
            past_key_values=decoder_outputs.past_key_values,
            decoder_hidden_states=decoder_outputs.hidden_states,
            decoder_attentions=decoder_outputs.attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=encoder_outputs.last_hidden_state,
            encoder_hidden_states=encoder_outputs.hidden_states,
            encoder_attentions=encoder_outputs.attentions,
        )

    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.prepare_inputs_for_generation
    def prepare_inputs_for_generation(
        self,
        input_ids,
        past_key_values=None,
        attention_mask=None,
        head_mask=None,
        decoder_head_mask=None,
        cross_attn_head_mask=None,
        use_cache=None,
        encoder_outputs=None,
        **kwargs,
    ):
        # cut decoder_input_ids if past is used
        if past_key_values is not None:
            input_ids = input_ids[:, -1:]

        return {
            "decoder_input_ids": input_ids,
            "past_key_values": past_key_values,
            "encoder_outputs": encoder_outputs,
            "attention_mask": attention_mask,
            "head_mask": head_mask,
            "decoder_head_mask": decoder_head_mask,
            "cross_attn_head_mask": cross_attn_head_mask,
            "use_cache": use_cache,
        }

    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration.prepare_decoder_input_ids_from_labels
    def prepare_decoder_input_ids_from_labels(self, labels: torch.Tensor):
        return self._shift_right(labels)

    # Copied from transformers.models.t5.modeling_t5.T5ForConditionalGeneration._reorder_cache
    def _reorder_cache(self, past_key_values, beam_idx):
        # if decoder past is not included in output
        # speedy decoding is disabled and no need to reorder
        if past_key_values is None:
            logger.warning("You might want to consider setting `use_cache=True` to speed up decoding")
            return past_key_values

        reordered_decoder_past = ()
        for layer_past_states in past_key_values:
            # get the correct batch idx from layer past batch dim
            # batch dim of `past` is at 2nd position
            reordered_layer_past_states = ()
            for layer_past_state in layer_past_states:
                # need to set correct `past` for each of the four key / value states
                reordered_layer_past_states = reordered_layer_past_states + (
                    layer_past_state.index_select(0, beam_idx.to(layer_past_state.device)),
                )

            assert reordered_layer_past_states[0].shape == layer_past_states[0].shape
            assert len(reordered_layer_past_states) == len(layer_past_states)

            reordered_decoder_past = reordered_decoder_past + (reordered_layer_past_states,)
        return reordered_decoder_past


@add_start_docstrings(
    "The bare MT5 Model transformer outputting encoder's raw hidden-states without any specific head on top.",
    MT5_START_DOCSTRING,
)
class
MT5EncoderModel(MT5PreTrainedModel): r""" Examples: ```python >>> from transformers import MT5EncoderModel, AutoTokenizer >>> model = MT5EncoderModel.from_pretrained("google/mt5-small") >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small") >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien." >>> input_ids = tokenizer(article, return_tensors="pt").input_ids >>> outputs = model(input_ids) >>> hidden_state = outputs.last_hidden_state ```""" model_type = "mt5" config_class = MT5Config _keys_to_ignore_on_load_missing = [ r"encoder.embed_tokens.weight", ] _keys_to_ignore_on_save = [ r"encoder.embed_tokens.weight", ] _keys_to_ignore_on_load_missing = [r"encoder.embed_tokens.weight"] # Copied from transformers.models.t5.modeling_t5.T5EncoderModel.__init__ with T5->MT5 def __init__(self, config: MT5Config): super().__init__(config) self.shared = nn.Embedding(config.vocab_size, config.d_model) encoder_config = copy.deepcopy(config) encoder_config.use_cache = False encoder_config.is_encoder_decoder = False self.encoder = MT5Stack(encoder_config, self.shared) # Initialize weights and apply final processing self.post_init() # Model parallel self.model_parallel = False self.device_map = None @add_start_docstrings(PARALLELIZE_DOCSTRING) # Copied from transformers.models.t5.modeling_t5.T5EncoderModel.parallelize def parallelize(self, device_map=None): warnings.warn( "`T5EncoderModel.parallelize` is deprecated and will be removed in v5 of Transformers, you should load" " your model with `device_map='balanced'` in the call to `from_pretrained`. 
You can also provide your own" " `device_map` but it needs to be a dictionary module_name to device, so for instance {'block.0': 0," " 'block.1': 1, ...}", FutureWarning, ) self.device_map = ( get_device_map(len(self.encoder.block), range(torch.cuda.device_count())) if device_map is None else device_map ) assert_device_map(self.device_map, len(self.encoder.block)) self.encoder.parallelize(self.device_map) self.model_parallel = True @add_start_docstrings(DEPARALLELIZE_DOCSTRING) # Copied from transformers.models.t5.modeling_t5.T5EncoderModel.deparallelize def deparallelize(self): warnings.warn( "Like `parallelize`, `deparallelize` is deprecated and will be removed in v5 of Transformers.", FutureWarning, ) self.encoder.deparallelize() self.encoder = self.encoder.to("cpu") self.model_parallel = False self.device_map = None torch.cuda.empty_cache() # Copied from transformers.models.t5.modeling_t5.T5EncoderModel.get_input_embeddings def get_input_embeddings(self): return self.shared # Copied from transformers.models.t5.modeling_t5.T5EncoderModel.set_input_embeddings def set_input_embeddings(self, new_embeddings): self.shared = new_embeddings self.encoder.set_input_embeddings(new_embeddings) # Copied from transformers.models.t5.modeling_t5.T5EncoderModel.get_encoder def get_encoder(self): return self.encoder # Copied from transformers.models.t5.modeling_t5.T5EncoderModel._prune_heads def _prune_heads(self, heads_to_prune): """ Prunes heads of the model. 
heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base class PreTrainedModel """ for layer, heads in heads_to_prune.items(): self.encoder.block[layer].layer[0].SelfAttention.prune_heads(heads) @add_start_docstrings_to_model_forward(MT5_ENCODER_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutput, config_class=_CONFIG_FOR_DOC) # Copied from transformers.models.t5.modeling_t5.T5EncoderModel.forward with T5->MT5, t5->mt5 def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple[torch.FloatTensor], BaseModelOutput]: r""" Returns: Example: ```python >>> from transformers import AutoTokenizer, MT5EncoderModel >>> tokenizer = AutoTokenizer.from_pretrained("mt5-small") >>> model = MT5EncoderModel.from_pretrained("mt5-small") >>> input_ids = tokenizer( ... "Studies have been shown that owning a dog is good for you", return_tensors="pt" ... ).input_ids # Batch size 1 >>> outputs = model(input_ids=input_ids) >>> last_hidden_states = outputs.last_hidden_state ```""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict encoder_outputs = self.encoder( input_ids=input_ids, attention_mask=attention_mask, inputs_embeds=inputs_embeds, head_mask=head_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) return encoder_outputs
27182812/ChatGLM-LLaMA-chinese-insturct
4,178
src/transformers/models/mt5/modeling_flax_mt5.py
# coding=utf-8
# Copyright 2021 Mesh TensorFlow authors, T5 Authors and HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Flax mT5 model."""

import numpy as np

from ...utils import logging
from ..t5.modeling_flax_t5 import FlaxT5EncoderModel, FlaxT5ForConditionalGeneration, FlaxT5Model
from .configuration_mt5 import MT5Config


logger = logging.get_logger(__name__)

# NOTE(review): kept as "T5Config" to match the T5 docstring templates this file reuses —
# confirm whether it should instead point at "MT5Config".
_CONFIG_FOR_DOC = "T5Config"


# Copied from transformers.models.bart.modeling_flax_bart.shift_tokens_right
def shift_tokens_right(input_ids: np.ndarray, pad_token_id: int, decoder_start_token_id: int) -> np.ndarray:
    """
    Shift input ids one token to the right.

    Fix: the parameter annotation used `np.array` (a function, not a type); it is now
    `np.ndarray`, matching the declared return type.

    Args:
        input_ids: integer array of shape `(batch_size, sequence_length)`; entries equal to
            `-100` (the conventional "ignore" label value) are replaced by `pad_token_id`.
        pad_token_id: id substituted for `-100` entries after shifting.
        decoder_start_token_id: id written at position 0 of every row.

    Returns:
        A new array of the same shape where row `i` is `decoder_start_token_id` followed by
        `input_ids[i, :-1]`, with `-100` entries replaced by `pad_token_id`.
    """
    shifted_input_ids = np.zeros_like(input_ids)
    shifted_input_ids[:, 1:] = input_ids[:, :-1]
    shifted_input_ids[:, 0] = decoder_start_token_id

    # -100 marks ignored label positions and must not be fed to the decoder.
    shifted_input_ids = np.where(shifted_input_ids == -100, pad_token_id, shifted_input_ids)
    return shifted_input_ids


class FlaxMT5Model(FlaxT5Model):
    r"""
    This class overrides [`FlaxT5Model`]. Please check the superclass for the appropriate documentation alongside
    usage examples.

    Examples:

    ```python
    >>> from transformers import FlaxMT5Model, AutoTokenizer

    >>> model = FlaxMT5Model.from_pretrained("google/mt5-small")
    >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")
    >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien."
    >>> summary = "Weiter Verhandlung in Syrien."
    >>> inputs = tokenizer(article, return_tensors="np")
    >>> decoder_input_ids = tokenizer(text_target=summary, return_tensors="np").input_ids

    >>> outputs = model(input_ids=inputs["input_ids"], decoder_input_ids=decoder_input_ids)
    >>> hidden_states = outputs.last_hidden_state
    ```"""

    model_type = "mt5"
    config_class = MT5Config


class FlaxMT5EncoderModel(FlaxT5EncoderModel):
    r"""
    This class overrides [`FlaxT5EncoderModel`]. Please check the superclass for the appropriate documentation
    alongside usage examples.

    Examples:

    ```python
    >>> from transformers import FlaxMT5EncoderModel, AutoTokenizer

    >>> model = FlaxMT5EncoderModel.from_pretrained("google/mt5-small")
    >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")
    >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien."
    >>> inputs = tokenizer(article, return_tensors="np")

    >>> outputs = model(input_ids=inputs["input_ids"])
    >>> hidden_states = outputs.last_hidden_state
    ```"""

    # Fix: the example previously instantiated `FlaxT5EncoderModel` (the T5 parent) instead of
    # this class, and built `decoder_input_ids` that an encoder-only model never consumes.

    model_type = "mt5"
    config_class = MT5Config


class FlaxMT5ForConditionalGeneration(FlaxT5ForConditionalGeneration):
    r"""
    This class overrides [`FlaxT5ForConditionalGeneration`]. Please check the superclass for the appropriate
    documentation alongside usage examples.

    Examples:

    ```python
    >>> from transformers import FlaxMT5ForConditionalGeneration, AutoTokenizer

    >>> model = FlaxMT5ForConditionalGeneration.from_pretrained("google/mt5-small")
    >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")
    >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien."
    >>> summary = "Weiter Verhandlung in Syrien."
    >>> inputs = tokenizer(article, return_tensors="np")
    >>> decoder_input_ids = tokenizer(text_target=summary, return_tensors="np").input_ids

    >>> outputs = model(**inputs, decoder_input_ids=decoder_input_ids)
    >>> logits = outputs.logits
    ```"""

    model_type = "mt5"
    config_class = MT5Config
27182812/ChatGLM-LLaMA-chinese-insturct
7,588
src/transformers/models/mt5/configuration_mt5.py
# coding=utf-8 # Copyright 2020, The T5 Authors and HuggingFace Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ mT5 model configuration""" from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxSeq2SeqConfigWithPast from ...utils import logging logger = logging.get_logger(__name__) class MT5Config(PretrainedConfig): r""" This is the configuration class to store the configuration of a [`MT5Model`] or a [`TFMT5Model`]. It is used to instantiate a mT5 model according to the specified arguments, defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of the mT5 [google/mt5-small](https://huggingface.co/google/mt5-small) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Arguments: vocab_size (`int`, *optional*, defaults to 250112): Vocabulary size of the T5 model. Defines the number of different tokens that can be represented by the `inputs_ids` passed when calling [`T5Model`] or [`TFT5Model`]. d_model (`int`, *optional*, defaults to 512): Size of the encoder layers and the pooler layer. d_kv (`int`, *optional*, defaults to 64): Size of the key, query, value projections per attention head. `d_kv` has to be equal to `d_model // num_heads`. 
d_ff (`int`, *optional*, defaults to 1024): Size of the intermediate feed forward layer in each `T5Block`. num_layers (`int`, *optional*, defaults to 8): Number of hidden layers in the Transformer encoder. num_decoder_layers (`int`, *optional*): Number of hidden layers in the Transformer decoder. Will use the same value as `num_layers` if not set. num_heads (`int`, *optional*, defaults to 6): Number of attention heads for each attention layer in the Transformer encoder. relative_attention_num_buckets (`int`, *optional*, defaults to 32): The number of buckets to use for each attention layer. relative_attention_max_distance (`int`, *optional*, defaults to 128): The maximum distance of the longer sequences for the bucket separation. dropout_rate (`float`, *optional*, defaults to 0.1): The ratio for all dropout layers. layer_norm_eps (`float`, *optional*, defaults to 1e-6): The epsilon used by the layer normalization layers. initializer_factor (`float`, *optional*, defaults to 1): A factor for initializing all weight matrices (should be kept to 1, used internally for initialization testing). feed_forward_proj (`string`, *optional*, defaults to `"gated-gelu"`): Type of feed forward layer to be used. Should be one of `"relu"` or `"gated-gelu"`. use_cache (`bool`, *optional*, defaults to `True`): Whether or not the model should return the last key/values attentions (not used by all models). 
""" model_type = "mt5" keys_to_ignore_at_inference = ["past_key_values"] def __init__( self, vocab_size=250112, d_model=512, d_kv=64, d_ff=1024, num_layers=8, num_decoder_layers=None, num_heads=6, relative_attention_num_buckets=32, relative_attention_max_distance=128, dropout_rate=0.1, layer_norm_epsilon=1e-6, initializer_factor=1.0, feed_forward_proj="gated-gelu", is_encoder_decoder=True, use_cache=True, tokenizer_class="T5Tokenizer", tie_word_embeddings=False, pad_token_id=0, eos_token_id=1, decoder_start_token_id=0, **kwargs, ): super().__init__( is_encoder_decoder=is_encoder_decoder, tokenizer_class=tokenizer_class, tie_word_embeddings=tie_word_embeddings, pad_token_id=pad_token_id, eos_token_id=eos_token_id, decoder_start_token_id=decoder_start_token_id, **kwargs, ) self.vocab_size = vocab_size self.d_model = d_model self.d_kv = d_kv self.d_ff = d_ff self.num_layers = num_layers self.num_decoder_layers = ( num_decoder_layers if num_decoder_layers is not None else self.num_layers ) # default = symmetry self.num_heads = num_heads self.relative_attention_num_buckets = relative_attention_num_buckets self.relative_attention_max_distance = relative_attention_max_distance self.dropout_rate = dropout_rate self.layer_norm_epsilon = layer_norm_epsilon self.initializer_factor = initializer_factor self.feed_forward_proj = feed_forward_proj self.use_cache = use_cache act_info = self.feed_forward_proj.split("-") self.dense_act_fn = act_info[-1] self.is_gated_act = act_info[0] == "gated" if len(act_info) > 1 and act_info[0] != "gated" or len(act_info) > 2: raise ValueError( f"`feed_forward_proj`: {feed_forward_proj} is not a valid activation function of the dense layer." "Please make sure `feed_forward_proj` is of the format `gated-{ACT_FN}` or `{ACT_FN}`, e.g. 
" "'gated-gelu' or 'relu'" ) # for backwards compatibility if feed_forward_proj == "gated-gelu": self.dense_act_fn = "gelu_new" @property def hidden_size(self): return self.d_model @property def num_attention_heads(self): return self.num_heads @property def num_hidden_layers(self): return self.num_layers class MT5OnnxConfig(OnnxSeq2SeqConfigWithPast): @property # Copied from transformers.models.t5.configuration_t5.T5OnnxConfig.inputs def inputs(self) -> Mapping[str, Mapping[int, str]]: common_inputs = { "input_ids": {0: "batch", 1: "encoder_sequence"}, "attention_mask": {0: "batch", 1: "encoder_sequence"}, } if self.use_past: common_inputs["attention_mask"][1] = "past_encoder_sequence + sequence" common_inputs["decoder_input_ids"] = {0: "batch"} common_inputs["decoder_attention_mask"] = {0: "batch", 1: "past_decoder_sequence + sequence"} else: common_inputs["decoder_input_ids"] = {0: "batch", 1: "decoder_sequence"} common_inputs["decoder_attention_mask"] = {0: "batch", 1: "decoder_sequence"} if self.use_past: self.fill_with_past_key_values_(common_inputs, direction="inputs") return common_inputs @property # Copied from transformers.models.t5.configuration_t5.T5OnnxConfig.default_onnx_opset def default_onnx_opset(self) -> int: return 13 @property def atol_for_validation(self) -> float: return 5e-4
27182812/ChatGLM-LLaMA-chinese-insturct
3,325
src/transformers/models/mt5/modeling_tf_mt5.py
# coding=utf-8 # Copyright 2020 Mesh TensorFlow authors, T5 Authors and HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Tensorflow mT5 model.""" from ...utils import logging from ..t5.modeling_tf_t5 import TFT5EncoderModel, TFT5ForConditionalGeneration, TFT5Model from .configuration_mt5 import MT5Config logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = "T5Config" class TFMT5Model(TFT5Model): r""" This class overrides [`TFT5Model`]. Please check the superclass for the appropriate documentation alongside usage examples. Examples: ```python >>> from transformers import TFMT5Model, AutoTokenizer >>> model = TFMT5Model.from_pretrained("google/mt5-small") >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small") >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien." >>> summary = "Weiter Verhandlung in Syrien." >>> inputs = tokenizer(article, return_tensors="tf") >>> labels = tokenizer(text_target=summary, return_tensors="tf") >>> outputs = model(input_ids=inputs["input_ids"], decoder_input_ids=labels["input_ids"]) >>> hidden_states = outputs.last_hidden_state ```""" model_type = "mt5" config_class = MT5Config class TFMT5ForConditionalGeneration(TFT5ForConditionalGeneration): r""" This class overrides [`TFT5ForConditionalGeneration`]. Please check the superclass for the appropriate documentation alongside usage examples. 
Examples: ```python >>> from transformers import TFMT5ForConditionalGeneration, AutoTokenizer >>> model = TFMT5ForConditionalGeneration.from_pretrained("google/mt5-small") >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small") >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien." >>> summary = "Weiter Verhandlung in Syrien." >>> inputs = tokenizer(article, text_target=summary, return_tensors="tf") >>> outputs = model(**inputs) >>> loss = outputs.loss ```""" model_type = "mt5" config_class = MT5Config class TFMT5EncoderModel(TFT5EncoderModel): r""" This class overrides [`TFT5EncoderModel`]. Please check the superclass for the appropriate documentation alongside usage examples. Examples: ```python >>> from transformers import TFMT5EncoderModel, AutoTokenizer >>> model = TFMT5EncoderModel.from_pretrained("google/mt5-small") >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small") >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien." >>> input_ids = tokenizer(article, return_tensors="tf").input_ids >>> outputs = model(input_ids) >>> hidden_state = outputs.last_hidden_state ```""" model_type = "mt5" config_class = MT5Config
27182812/ChatGLM-LLaMA-chinese-insturct
1,535
src/transformers/models/upernet/__init__.py
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _import_structure = { "configuration_upernet": ["UperNetConfig"], } try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_upernet"] = [ "UperNetForSemanticSegmentation", "UperNetPreTrainedModel", ] if TYPE_CHECKING: from .configuration_upernet import UperNetConfig try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_upernet import UperNetForSemanticSegmentation, UperNetPreTrainedModel else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
17,379
src/transformers/models/upernet/modeling_upernet.py
# coding=utf-8 # Copyright 2022 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch UperNet model. Based on OpenMMLab's implementation, found in https://github.com/open-mmlab/mmsegmentation.""" from typing import List, Optional, Tuple, Union import torch from torch import nn from torch.nn import CrossEntropyLoss from ... import AutoBackbone from ...modeling_outputs import SemanticSegmenterOutput from ...modeling_utils import BackboneMixin, PreTrainedModel from ...utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from .configuration_upernet import UperNetConfig UPERNET_PRETRAINED_MODEL_ARCHIVE_LIST = [ "openmmlab/upernet-convnext-tiny", # See all UperNet models at https://huggingface.co/models?filter=upernet ] # General docstring _CONFIG_FOR_DOC = "UperNetConfig" class UperNetConvModule(nn.Module): """ A convolutional block that bundles conv/norm/activation layers. This block simplifies the usage of convolution layers, which are commonly used with a norm layer (e.g., BatchNorm) and activation layer (e.g., ReLU). 
""" def __init__( self, in_channels: int, out_channels: int, kernel_size: Union[int, Tuple[int, int]], padding: Union[int, Tuple[int, int], str] = 0, bias: bool = False, dilation: Union[int, Tuple[int, int]] = 1, ) -> None: super().__init__() self.conv = nn.Conv2d( in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, padding=padding, bias=bias, dilation=dilation, ) self.batch_norm = nn.BatchNorm2d(out_channels) self.activation = nn.ReLU() def forward(self, input: torch.Tensor) -> torch.Tensor: output = self.conv(input) output = self.batch_norm(output) output = self.activation(output) return output class UperNetPyramidPoolingBlock(nn.Module): def __init__(self, pool_scale: int, in_channels: int, channels: int) -> None: super().__init__() self.layers = [ nn.AdaptiveAvgPool2d(pool_scale), UperNetConvModule(in_channels, channels, kernel_size=1), ] for i, layer in enumerate(self.layers): self.add_module(str(i), layer) def forward(self, input: torch.Tensor) -> torch.Tensor: hidden_state = input for layer in self.layers: hidden_state = layer(hidden_state) return hidden_state class UperNetPyramidPoolingModule(nn.Module): """ Pyramid Pooling Module (PPM) used in PSPNet. Args: pool_scales (`Tuple[int]`): Pooling scales used in Pooling Pyramid Module. in_channels (`int`): Input channels. channels (`int`): Channels after modules, before conv_seg. align_corners (`bool`): align_corners argument of F.interpolate. 
""" def __init__(self, pool_scales: Tuple[int, ...], in_channels: int, channels: int, align_corners: bool) -> None: super().__init__() self.pool_scales = pool_scales self.align_corners = align_corners self.in_channels = in_channels self.channels = channels self.blocks = [] for i, pool_scale in enumerate(pool_scales): block = UperNetPyramidPoolingBlock(pool_scale=pool_scale, in_channels=in_channels, channels=channels) self.blocks.append(block) self.add_module(str(i), block) def forward(self, x: torch.Tensor) -> List[torch.Tensor]: ppm_outs = [] for ppm in self.blocks: ppm_out = ppm(x) upsampled_ppm_out = nn.functional.interpolate( ppm_out, size=x.size()[2:], mode="bilinear", align_corners=self.align_corners ) ppm_outs.append(upsampled_ppm_out) return ppm_outs class UperNetHead(nn.Module): """ Unified Perceptual Parsing for Scene Understanding. This head is the implementation of [UPerNet](https://arxiv.org/abs/1807.10221). """ def __init__(self, config, in_channels): super().__init__() self.config = config self.pool_scales = config.pool_scales # e.g. 
(1, 2, 3, 6) self.in_channels = in_channels self.channels = config.hidden_size self.align_corners = False self.classifier = nn.Conv2d(self.channels, config.num_labels, kernel_size=1) # PSP Module self.psp_modules = UperNetPyramidPoolingModule( self.pool_scales, self.in_channels[-1], self.channels, align_corners=self.align_corners, ) self.bottleneck = UperNetConvModule( self.in_channels[-1] + len(self.pool_scales) * self.channels, self.channels, kernel_size=3, padding=1, ) # FPN Module self.lateral_convs = nn.ModuleList() self.fpn_convs = nn.ModuleList() for in_channels in self.in_channels[:-1]: # skip the top layer l_conv = UperNetConvModule(in_channels, self.channels, kernel_size=1) fpn_conv = UperNetConvModule(self.channels, self.channels, kernel_size=3, padding=1) self.lateral_convs.append(l_conv) self.fpn_convs.append(fpn_conv) self.fpn_bottleneck = UperNetConvModule( len(self.in_channels) * self.channels, self.channels, kernel_size=3, padding=1, ) def init_weights(self): self.apply(self._init_weights) def _init_weights(self, module): if isinstance(module, nn.Conv2d): module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.bias is not None: module.bias.data.zero_() def psp_forward(self, inputs): x = inputs[-1] psp_outs = [x] psp_outs.extend(self.psp_modules(x)) psp_outs = torch.cat(psp_outs, dim=1) output = self.bottleneck(psp_outs) return output def forward(self, encoder_hidden_states: torch.Tensor) -> torch.Tensor: # build laterals laterals = [lateral_conv(encoder_hidden_states[i]) for i, lateral_conv in enumerate(self.lateral_convs)] laterals.append(self.psp_forward(encoder_hidden_states)) # build top-down path used_backbone_levels = len(laterals) for i in range(used_backbone_levels - 1, 0, -1): prev_shape = laterals[i - 1].shape[2:] laterals[i - 1] = laterals[i - 1] + nn.functional.interpolate( laterals[i], size=prev_shape, mode="bilinear", align_corners=self.align_corners ) # build outputs fpn_outs = 
[self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels - 1)] # append psp feature fpn_outs.append(laterals[-1]) for i in range(used_backbone_levels - 1, 0, -1): fpn_outs[i] = nn.functional.interpolate( fpn_outs[i], size=fpn_outs[0].shape[2:], mode="bilinear", align_corners=self.align_corners ) fpn_outs = torch.cat(fpn_outs, dim=1) output = self.fpn_bottleneck(fpn_outs) output = self.classifier(output) return output class UperNetFCNHead(nn.Module): """ Fully Convolution Networks for Semantic Segmentation. This head is the implementation of [FCNNet](https://arxiv.org/abs/1411.4038>). Args: config: Configuration. in_channels (int): Number of input channels. kernel_size (int): The kernel size for convs in the head. Default: 3. dilation (int): The dilation rate for convs in the head. Default: 1. """ def __init__( self, config, in_index: int = 2, kernel_size: int = 3, dilation: Union[int, Tuple[int, int]] = 1 ) -> None: super().__init__() self.config = config self.in_channels = config.auxiliary_in_channels self.channels = config.auxiliary_channels self.num_convs = config.auxiliary_num_convs self.concat_input = config.auxiliary_concat_input self.in_index = in_index conv_padding = (kernel_size // 2) * dilation convs = [] convs.append( UperNetConvModule( self.in_channels, self.channels, kernel_size=kernel_size, padding=conv_padding, dilation=dilation ) ) for i in range(self.num_convs - 1): convs.append( UperNetConvModule( self.channels, self.channels, kernel_size=kernel_size, padding=conv_padding, dilation=dilation ) ) if self.num_convs == 0: self.convs = nn.Identity() else: self.convs = nn.Sequential(*convs) if self.concat_input: self.conv_cat = UperNetConvModule( self.in_channels + self.channels, self.channels, kernel_size=kernel_size, padding=kernel_size // 2 ) self.classifier = nn.Conv2d(self.channels, config.num_labels, kernel_size=1) def init_weights(self): self.apply(self._init_weights) def _init_weights(self, module): if isinstance(module, nn.Conv2d): 
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) if module.bias is not None: module.bias.data.zero_() def forward(self, encoder_hidden_states: torch.Tensor) -> torch.Tensor: # just take the relevant feature maps hidden_states = encoder_hidden_states[self.in_index] output = self.convs(hidden_states) if self.concat_input: output = self.conv_cat(torch.cat([hidden_states, output], dim=1)) output = self.classifier(output) return output class UperNetPreTrainedModel(PreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. """ config_class = UperNetConfig main_input_name = "pixel_values" supports_gradient_checkpointing = True def _init_weights(self, module): if isinstance(module, UperNetPreTrainedModel): module.backbone.init_weights() module.decode_head.init_weights() module.auxiliary_head.init_weights() def init_weights(self): """Initialize the weights""" self.backbone.init_weights() self.decode_head.init_weights() self.auxiliary_head.init_weights() def _set_gradient_checkpointing(self, module, value=False): if isinstance(module, BackboneMixin): module.gradient_checkpointing = value UPERNET_START_DOCSTRING = r""" Parameters: This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) sub-class. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. config ([`UperNetConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ UPERNET_INPUTS_DOCSTRING = r""" Args: pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`): Pixel values. Padding will be ignored by default should you provide it. 
Pixel values can be obtained using [`AutoImageProcessor`]. See [`SegformerImageProcessor.__call__`] for details. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers in case the backbone has them. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers of the backbone. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ @add_start_docstrings( """UperNet framework leveraging any vision backbone e.g. for ADE20k, CityScapes.""", UPERNET_START_DOCSTRING, ) class UperNetForSemanticSegmentation(UperNetPreTrainedModel): def __init__(self, config): super().__init__(config) self.backbone = AutoBackbone.from_config(config.backbone_config) # Semantic segmentation head(s) self.decode_head = UperNetHead(config, in_channels=self.backbone.channels) self.auxiliary_head = UperNetFCNHead(config) if config.use_auxiliary_head else None # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(UPERNET_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @replace_return_docstrings(output_type=SemanticSegmenterOutput, config_class=_CONFIG_FOR_DOC) def forward( self, pixel_values: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, labels: Optional[torch.Tensor] = None, return_dict: Optional[bool] = None, ) -> Union[tuple, SemanticSegmenterOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size, height, width)`, *optional*): Ground truth semantic segmentation maps for computing the loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels > 1`, a classification loss is computed (Cross-Entropy). 
Returns: Examples: ```python >>> from transformers import AutoImageProcessor, UperNetForSemanticSegmentation >>> from PIL import Image >>> from huggingface_hub import hf_hub_download >>> image_processor = AutoImageProcessor.from_pretrained("openmmlab/upernet-convnext-tiny") >>> model = UperNetForSemanticSegmentation.from_pretrained("openmmlab/upernet-convnext-tiny") >>> filepath = hf_hub_download( ... repo_id="hf-internal-testing/fixtures_ade20k", filename="ADE_val_00000001.jpg", repo_type="dataset" ... ) >>> image = Image.open(filepath).convert("RGB") >>> inputs = image_processor(images=image, return_tensors="pt") >>> outputs = model(**inputs) >>> logits = outputs.logits # shape (batch_size, num_labels, height, width) >>> list(logits.shape) [1, 150, 512, 512] ```""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions outputs = self.backbone.forward_with_filtered_kwargs( pixel_values, output_hidden_states=output_hidden_states, output_attentions=output_attentions ) features = outputs.feature_maps logits = self.decode_head(features) logits = nn.functional.interpolate(logits, size=pixel_values.shape[2:], mode="bilinear", align_corners=False) auxiliary_logits = None if self.auxiliary_head is not None: auxiliary_logits = self.auxiliary_head(features) auxiliary_logits = nn.functional.interpolate( auxiliary_logits, size=pixel_values.shape[2:], mode="bilinear", align_corners=False ) loss = None if labels is not None: if self.config.num_labels == 1: raise ValueError("The number of labels should be greater than one") else: # compute weighted loss loss_fct = CrossEntropyLoss(ignore_index=self.config.loss_ignore_index) main_loss = loss_fct(logits, labels) auxiliary_loss = loss_fct(auxiliary_logits, labels) loss = 
main_loss + self.config.auxiliary_loss_weight * auxiliary_loss if not return_dict: if output_hidden_states: output = (logits,) + outputs[1:] else: output = (logits,) + outputs[2:] return ((loss,) + output) if loss is not None else output return SemanticSegmenterOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, )
27182812/ChatGLM-LLaMA-chinese-insturct
10,268
src/transformers/models/upernet/convert_convnext_upernet_to_pytorch.py
# coding=utf-8 # Copyright 2022 The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Convert ConvNext + UperNet checkpoints from mmsegmentation.""" import argparse import json import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import ConvNextConfig, SegformerImageProcessor, UperNetConfig, UperNetForSemanticSegmentation def get_upernet_config(model_name): auxiliary_in_channels = 384 if "tiny" in model_name: depths = [3, 3, 9, 3] hidden_sizes = [96, 192, 384, 768] if "small" in model_name: depths = [3, 3, 27, 3] hidden_sizes = [96, 192, 384, 768] if "base" in model_name: depths = [3, 3, 27, 3] hidden_sizes = [128, 256, 512, 1024] auxiliary_in_channels = 512 if "large" in model_name: depths = [3, 3, 27, 3] hidden_sizes = [192, 384, 768, 1536] auxiliary_in_channels = 768 if "xlarge" in model_name: depths = [3, 3, 27, 3] hidden_sizes = [256, 512, 1024, 2048] auxiliary_in_channels = 1024 # set label information num_labels = 150 repo_id = "huggingface/label-files" filename = "ade20k-id2label.json" id2label = json.load(open(hf_hub_download(repo_id, filename, repo_type="dataset"), "r")) id2label = {int(k): v for k, v in id2label.items()} label2id = {v: k for k, v in id2label.items()} backbone_config = ConvNextConfig( depths=depths, hidden_sizes=hidden_sizes, out_features=["stage1", "stage2", "stage3", "stage4"] ) config = UperNetConfig( backbone_config=backbone_config, 
auxiliary_in_channels=auxiliary_in_channels, num_labels=num_labels, id2label=id2label, label2id=label2id, ) return config # here we list all keys to be renamed (original name on the left, our name on the right) def create_rename_keys(config): rename_keys = [] # fmt: off # stem rename_keys.append(("backbone.downsample_layers.0.0.weight", "backbone.embeddings.patch_embeddings.weight")) rename_keys.append(("backbone.downsample_layers.0.0.bias", "backbone.embeddings.patch_embeddings.bias")) rename_keys.append(("backbone.downsample_layers.0.1.weight", "backbone.embeddings.layernorm.weight")) rename_keys.append(("backbone.downsample_layers.0.1.bias", "backbone.embeddings.layernorm.bias")) # stages for i in range(len(config.backbone_config.depths)): for j in range(config.backbone_config.depths[i]): rename_keys.append((f"backbone.stages.{i}.{j}.gamma", f"backbone.encoder.stages.{i}.layers.{j}.layer_scale_parameter")) rename_keys.append((f"backbone.stages.{i}.{j}.depthwise_conv.weight", f"backbone.encoder.stages.{i}.layers.{j}.dwconv.weight")) rename_keys.append((f"backbone.stages.{i}.{j}.depthwise_conv.bias", f"backbone.encoder.stages.{i}.layers.{j}.dwconv.bias")) rename_keys.append((f"backbone.stages.{i}.{j}.norm.weight", f"backbone.encoder.stages.{i}.layers.{j}.layernorm.weight")) rename_keys.append((f"backbone.stages.{i}.{j}.norm.bias", f"backbone.encoder.stages.{i}.layers.{j}.layernorm.bias")) rename_keys.append((f"backbone.stages.{i}.{j}.pointwise_conv1.weight", f"backbone.encoder.stages.{i}.layers.{j}.pwconv1.weight")) rename_keys.append((f"backbone.stages.{i}.{j}.pointwise_conv1.bias", f"backbone.encoder.stages.{i}.layers.{j}.pwconv1.bias")) rename_keys.append((f"backbone.stages.{i}.{j}.pointwise_conv2.weight", f"backbone.encoder.stages.{i}.layers.{j}.pwconv2.weight")) rename_keys.append((f"backbone.stages.{i}.{j}.pointwise_conv2.bias", f"backbone.encoder.stages.{i}.layers.{j}.pwconv2.bias")) if i > 0: rename_keys.append((f"backbone.downsample_layers.{i}.0.weight", 
f"backbone.encoder.stages.{i}.downsampling_layer.0.weight")) rename_keys.append((f"backbone.downsample_layers.{i}.0.bias", f"backbone.encoder.stages.{i}.downsampling_layer.0.bias")) rename_keys.append((f"backbone.downsample_layers.{i}.1.weight", f"backbone.encoder.stages.{i}.downsampling_layer.1.weight")) rename_keys.append((f"backbone.downsample_layers.{i}.1.bias", f"backbone.encoder.stages.{i}.downsampling_layer.1.bias")) rename_keys.append((f"backbone.norm{i}.weight", f"backbone.hidden_states_norms.stage{i+1}.weight")) rename_keys.append((f"backbone.norm{i}.bias", f"backbone.hidden_states_norms.stage{i+1}.bias")) # decode head rename_keys.extend( [ ("decode_head.conv_seg.weight", "decode_head.classifier.weight"), ("decode_head.conv_seg.bias", "decode_head.classifier.bias"), ("auxiliary_head.conv_seg.weight", "auxiliary_head.classifier.weight"), ("auxiliary_head.conv_seg.bias", "auxiliary_head.classifier.bias"), ] ) # fmt: on return rename_keys def rename_key(dct, old, new): val = dct.pop(old) dct[new] = val def convert_upernet_checkpoint(model_name, pytorch_dump_folder_path, push_to_hub): model_name_to_url = { "upernet-convnext-tiny": "https://download.openmmlab.com/mmsegmentation/v0.5/convnext/upernet_convnext_tiny_fp16_512x512_160k_ade20k/upernet_convnext_tiny_fp16_512x512_160k_ade20k_20220227_124553-cad485de.pth", "upernet-convnext-small": "https://download.openmmlab.com/mmsegmentation/v0.5/convnext/upernet_convnext_small_fp16_512x512_160k_ade20k/upernet_convnext_small_fp16_512x512_160k_ade20k_20220227_131208-1b1e394f.pth", "upernet-convnext-base": "https://download.openmmlab.com/mmsegmentation/v0.5/convnext/upernet_convnext_base_fp16_512x512_160k_ade20k/upernet_convnext_base_fp16_512x512_160k_ade20k_20220227_181227-02a24fc6.pth", "upernet-convnext-large": "https://download.openmmlab.com/mmsegmentation/v0.5/convnext/upernet_convnext_large_fp16_640x640_160k_ade20k/upernet_convnext_large_fp16_640x640_160k_ade20k_20220226_040532-e57aa54d.pth", 
"upernet-convnext-xlarge": "https://download.openmmlab.com/mmsegmentation/v0.5/convnext/upernet_convnext_xlarge_fp16_640x640_160k_ade20k/upernet_convnext_xlarge_fp16_640x640_160k_ade20k_20220226_080344-95fc38c2.pth", } checkpoint_url = model_name_to_url[model_name] state_dict = torch.hub.load_state_dict_from_url(checkpoint_url, map_location="cpu")["state_dict"] config = get_upernet_config(model_name) model = UperNetForSemanticSegmentation(config) model.eval() # replace "bn" => "batch_norm" for key in state_dict.copy().keys(): val = state_dict.pop(key) if "bn" in key: key = key.replace("bn", "batch_norm") state_dict[key] = val # rename keys rename_keys = create_rename_keys(config) for src, dest in rename_keys: rename_key(state_dict, src, dest) model.load_state_dict(state_dict) # verify on image url = "https://huggingface.co/datasets/hf-internal-testing/fixtures_ade20k/resolve/main/ADE_val_00000001.jpg" image = Image.open(requests.get(url, stream=True).raw).convert("RGB") processor = SegformerImageProcessor() pixel_values = processor(image, return_tensors="pt").pixel_values with torch.no_grad(): outputs = model(pixel_values) if model_name == "upernet-convnext-tiny": expected_slice = torch.tensor( [[-8.8110, -8.8110, -8.6521], [-8.8110, -8.8110, -8.6521], [-8.7746, -8.7746, -8.6130]] ) elif model_name == "upernet-convnext-small": expected_slice = torch.tensor( [[-8.8236, -8.8236, -8.6771], [-8.8236, -8.8236, -8.6771], [-8.7638, -8.7638, -8.6240]] ) elif model_name == "upernet-convnext-base": expected_slice = torch.tensor( [[-8.8558, -8.8558, -8.6905], [-8.8558, -8.8558, -8.6905], [-8.7669, -8.7669, -8.6021]] ) elif model_name == "upernet-convnext-large": expected_slice = torch.tensor( [[-8.6660, -8.6660, -8.6210], [-8.6660, -8.6660, -8.6210], [-8.6310, -8.6310, -8.5964]] ) elif model_name == "upernet-convnext-xlarge": expected_slice = torch.tensor( [[-8.4980, -8.4980, -8.3977], [-8.4980, -8.4980, -8.3977], [-8.4379, -8.4379, -8.3412]] ) print("Logits:", 
outputs.logits[0, 0, :3, :3]) assert torch.allclose(outputs.logits[0, 0, :3, :3], expected_slice, atol=1e-4) print("Looks ok!") if pytorch_dump_folder_path is not None: print(f"Saving model {model_name} to {pytorch_dump_folder_path}") model.save_pretrained(pytorch_dump_folder_path) print(f"Saving processor to {pytorch_dump_folder_path}") processor.save_pretrained(pytorch_dump_folder_path) if push_to_hub: print(f"Pushing model and processor for {model_name} to hub") model.push_to_hub(f"openmmlab/{model_name}") processor.push_to_hub(f"openmmlab/{model_name}") if __name__ == "__main__": parser = argparse.ArgumentParser() # Required parameters parser.add_argument( "--model_name", default="upernet-convnext-tiny", type=str, choices=[f"upernet-convnext-{size}" for size in ["tiny", "small", "base", "large", "xlarge"]], help="Name of the ConvNext UperNet model you'd like to convert.", ) parser.add_argument( "--pytorch_dump_folder_path", default=None, type=str, help="Path to the output PyTorch model directory." ) parser.add_argument( "--push_to_hub", action="store_true", help="Whether or not to push the converted model to the 🤗 hub." ) args = parser.parse_args() convert_upernet_checkpoint(args.model_name, args.pytorch_dump_folder_path, args.push_to_hub)
27182812/ChatGLM-LLaMA-chinese-insturct
14,023
src/transformers/models/upernet/convert_swin_upernet_to_pytorch.py
# coding=utf-8 # Copyright 2022 The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Convert Swin Transformer + UperNet checkpoints from mmsegmentation. URL: https://github.com/open-mmlab/mmsegmentation/tree/master/configs/swin """ import argparse import json import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import SegformerImageProcessor, SwinConfig, UperNetConfig, UperNetForSemanticSegmentation def get_upernet_config(model_name): auxiliary_in_channels = 384 window_size = 7 if "tiny" in model_name: embed_dim = 96 depths = (2, 2, 6, 2) num_heads = (3, 6, 12, 24) elif "small" in model_name: embed_dim = 96 depths = (2, 2, 18, 2) num_heads = (3, 6, 12, 24) elif "base" in model_name: embed_dim = 128 depths = (2, 2, 18, 2) num_heads = (4, 8, 16, 32) window_size = 12 auxiliary_in_channels = 512 elif "large" in model_name: embed_dim = 192 depths = (2, 2, 18, 2) num_heads = (6, 12, 24, 48) window_size = 12 auxiliary_in_channels = 768 # set label information num_labels = 150 repo_id = "huggingface/label-files" filename = "ade20k-id2label.json" id2label = json.load(open(hf_hub_download(repo_id, filename, repo_type="dataset"), "r")) id2label = {int(k): v for k, v in id2label.items()} label2id = {v: k for k, v in id2label.items()} backbone_config = SwinConfig( embed_dim=embed_dim, depths=depths, num_heads=num_heads, window_size=window_size, out_features=["stage1", "stage2", "stage3", "stage4"], ) config = 
UperNetConfig( backbone_config=backbone_config, auxiliary_in_channels=auxiliary_in_channels, num_labels=num_labels, id2label=id2label, label2id=label2id, ) return config # here we list all keys to be renamed (original name on the left, our name on the right) def create_rename_keys(config): rename_keys = [] # fmt: off # stem rename_keys.append(("backbone.patch_embed.projection.weight", "backbone.embeddings.patch_embeddings.projection.weight")) rename_keys.append(("backbone.patch_embed.projection.bias", "backbone.embeddings.patch_embeddings.projection.bias")) rename_keys.append(("backbone.patch_embed.norm.weight", "backbone.embeddings.norm.weight")) rename_keys.append(("backbone.patch_embed.norm.bias", "backbone.embeddings.norm.bias")) # stages for i in range(len(config.backbone_config.depths)): for j in range(config.backbone_config.depths[i]): rename_keys.append((f"backbone.stages.{i}.blocks.{j}.norm1.weight", f"backbone.encoder.layers.{i}.blocks.{j}.layernorm_before.weight")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.norm1.bias", f"backbone.encoder.layers.{i}.blocks.{j}.layernorm_before.bias")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.attn.w_msa.relative_position_bias_table", f"backbone.encoder.layers.{i}.blocks.{j}.attention.self.relative_position_bias_table")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.attn.w_msa.relative_position_index", f"backbone.encoder.layers.{i}.blocks.{j}.attention.self.relative_position_index")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.attn.w_msa.proj.weight", f"backbone.encoder.layers.{i}.blocks.{j}.attention.output.dense.weight")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.attn.w_msa.proj.bias", f"backbone.encoder.layers.{i}.blocks.{j}.attention.output.dense.bias")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.norm2.weight", f"backbone.encoder.layers.{i}.blocks.{j}.layernorm_after.weight")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.norm2.bias", 
f"backbone.encoder.layers.{i}.blocks.{j}.layernorm_after.bias")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.ffn.layers.0.0.weight", f"backbone.encoder.layers.{i}.blocks.{j}.intermediate.dense.weight")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.ffn.layers.0.0.bias", f"backbone.encoder.layers.{i}.blocks.{j}.intermediate.dense.bias")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.ffn.layers.1.weight", f"backbone.encoder.layers.{i}.blocks.{j}.output.dense.weight")) rename_keys.append((f"backbone.stages.{i}.blocks.{j}.ffn.layers.1.bias", f"backbone.encoder.layers.{i}.blocks.{j}.output.dense.bias")) if i < 3: rename_keys.append((f"backbone.stages.{i}.downsample.reduction.weight", f"backbone.encoder.layers.{i}.downsample.reduction.weight")) rename_keys.append((f"backbone.stages.{i}.downsample.norm.weight", f"backbone.encoder.layers.{i}.downsample.norm.weight")) rename_keys.append((f"backbone.stages.{i}.downsample.norm.bias", f"backbone.encoder.layers.{i}.downsample.norm.bias")) rename_keys.append((f"backbone.norm{i}.weight", f"backbone.hidden_states_norms.stage{i+1}.weight")) rename_keys.append((f"backbone.norm{i}.bias", f"backbone.hidden_states_norms.stage{i+1}.bias")) # decode head rename_keys.extend( [ ("decode_head.conv_seg.weight", "decode_head.classifier.weight"), ("decode_head.conv_seg.bias", "decode_head.classifier.bias"), ("auxiliary_head.conv_seg.weight", "auxiliary_head.classifier.weight"), ("auxiliary_head.conv_seg.bias", "auxiliary_head.classifier.bias"), ] ) # fmt: on return rename_keys def rename_key(dct, old, new): val = dct.pop(old) dct[new] = val # we split up the matrix of each encoder layer into queries, keys and values def read_in_q_k_v(state_dict, backbone_config): num_features = [int(backbone_config.embed_dim * 2**i) for i in range(len(backbone_config.depths))] for i in range(len(backbone_config.depths)): dim = num_features[i] for j in range(backbone_config.depths[i]): # fmt: off # read in weights + bias of input 
projection layer (in original implementation, this is a single matrix + bias) in_proj_weight = state_dict.pop(f"backbone.stages.{i}.blocks.{j}.attn.w_msa.qkv.weight") in_proj_bias = state_dict.pop(f"backbone.stages.{i}.blocks.{j}.attn.w_msa.qkv.bias") # next, add query, keys and values (in that order) to the state dict state_dict[f"backbone.encoder.layers.{i}.blocks.{j}.attention.self.query.weight"] = in_proj_weight[:dim, :] state_dict[f"backbone.encoder.layers.{i}.blocks.{j}.attention.self.query.bias"] = in_proj_bias[: dim] state_dict[f"backbone.encoder.layers.{i}.blocks.{j}.attention.self.key.weight"] = in_proj_weight[ dim : dim * 2, : ] state_dict[f"backbone.encoder.layers.{i}.blocks.{j}.attention.self.key.bias"] = in_proj_bias[ dim : dim * 2 ] state_dict[f"backbone.encoder.layers.{i}.blocks.{j}.attention.self.value.weight"] = in_proj_weight[ -dim :, : ] state_dict[f"backbone.encoder.layers.{i}.blocks.{j}.attention.self.value.bias"] = in_proj_bias[-dim :] # fmt: on def correct_unfold_reduction_order(x): out_channel, in_channel = x.shape x = x.reshape(out_channel, 4, in_channel // 4) x = x[:, [0, 2, 1, 3], :].transpose(1, 2).reshape(out_channel, in_channel) return x def reverse_correct_unfold_reduction_order(x): out_channel, in_channel = x.shape x = x.reshape(out_channel, in_channel // 4, 4) x = x[:, :, [0, 2, 1, 3]].transpose(1, 2).reshape(out_channel, in_channel) return x def correct_unfold_norm_order(x): in_channel = x.shape[0] x = x.reshape(4, in_channel // 4) x = x[[0, 2, 1, 3], :].transpose(0, 1).reshape(in_channel) return x # there was an incompatibility with this version, due to a new implementation of their downsampling operation using nn.Unfold. # was resolved as seen here: # https://github.com/open-mmlab/mmdetection/blob/31c84958f54287a8be2b99cbf87a6dcf12e57753/mmdet/models/utils/ckpt_convert.py#L96. 
def reverse_correct_unfold_norm_order(x): in_channel = x.shape[0] x = x.reshape(in_channel // 4, 4) x = x[:, [0, 2, 1, 3]].transpose(0, 1).reshape(in_channel) return x def convert_upernet_checkpoint(model_name, pytorch_dump_folder_path, push_to_hub): model_name_to_url = { "upernet-swin-tiny": "https://download.openmmlab.com/mmsegmentation/v0.5/swin/upernet_swin_tiny_patch4_window7_512x512_160k_ade20k_pretrain_224x224_1K/upernet_swin_tiny_patch4_window7_512x512_160k_ade20k_pretrain_224x224_1K_20210531_112542-e380ad3e.pth", "upernet-swin-small": "https://download.openmmlab.com/mmsegmentation/v0.5/swin/upernet_swin_small_patch4_window7_512x512_160k_ade20k_pretrain_224x224_1K/upernet_swin_small_patch4_window7_512x512_160k_ade20k_pretrain_224x224_1K_20210526_192015-ee2fff1c.pth", "upernet-swin-base": "https://download.openmmlab.com/mmsegmentation/v0.5/swin/upernet_swin_base_patch4_window12_512x512_160k_ade20k_pretrain_384x384_22K/upernet_swin_base_patch4_window12_512x512_160k_ade20k_pretrain_384x384_22K_20210531_125459-429057bf.pth", "upernet-swin-large": "https://download.openmmlab.com/mmsegmentation/v0.5/swin/upernet_swin_large_patch4_window12_512x512_pretrain_384x384_22K_160k_ade20k/upernet_swin_large_patch4_window12_512x512_pretrain_384x384_22K_160k_ade20k_20220318_091743-9ba68901.pth", } checkpoint_url = model_name_to_url[model_name] state_dict = torch.hub.load_state_dict_from_url(checkpoint_url, map_location="cpu", file_name=model_name)[ "state_dict" ] for name, param in state_dict.items(): print(name, param.shape) config = get_upernet_config(model_name) model = UperNetForSemanticSegmentation(config) model.eval() # replace "bn" => "batch_norm" for key in state_dict.copy().keys(): val = state_dict.pop(key) if "bn" in key: key = key.replace("bn", "batch_norm") state_dict[key] = val # rename keys rename_keys = create_rename_keys(config) for src, dest in rename_keys: rename_key(state_dict, src, dest) read_in_q_k_v(state_dict, config.backbone_config) # fix downsample 
parameters for key, value in state_dict.items(): if "downsample" in key: if "reduction" in key: state_dict[key] = reverse_correct_unfold_reduction_order(value) if "norm" in key: state_dict[key] = reverse_correct_unfold_norm_order(value) model.load_state_dict(state_dict) # verify on image url = "https://huggingface.co/datasets/hf-internal-testing/fixtures_ade20k/resolve/main/ADE_val_00000001.jpg" image = Image.open(requests.get(url, stream=True).raw).convert("RGB") processor = SegformerImageProcessor() pixel_values = processor(image, return_tensors="pt").pixel_values with torch.no_grad(): outputs = model(pixel_values) logits = outputs.logits print(logits.shape) print("First values of logits:", logits[0, 0, :3, :3]) # assert values if model_name == "upernet-swin-tiny": expected_slice = torch.tensor( [[-7.5958, -7.5958, -7.4302], [-7.5958, -7.5958, -7.4302], [-7.4797, -7.4797, -7.3068]] ) elif model_name == "upernet-swin-small": expected_slice = torch.tensor( [[-7.1921, -7.1921, -6.9532], [-7.1921, -7.1921, -6.9532], [-7.0908, -7.0908, -6.8534]] ) elif model_name == "upernet-swin-base": expected_slice = torch.tensor( [[-6.5851, -6.5851, -6.4330], [-6.5851, -6.5851, -6.4330], [-6.4763, -6.4763, -6.3254]] ) elif model_name == "upernet-swin-large": expected_slice = torch.tensor( [[-7.5297, -7.5297, -7.3802], [-7.5297, -7.5297, -7.3802], [-7.4044, -7.4044, -7.2586]] ) print("Logits:", outputs.logits[0, 0, :3, :3]) assert torch.allclose(outputs.logits[0, 0, :3, :3], expected_slice, atol=1e-4) print("Looks ok!") if pytorch_dump_folder_path is not None: print(f"Saving model {model_name} to {pytorch_dump_folder_path}") model.save_pretrained(pytorch_dump_folder_path) print(f"Saving processor to {pytorch_dump_folder_path}") processor.save_pretrained(pytorch_dump_folder_path) if push_to_hub: print(f"Pushing model and processor for {model_name} to hub") model.push_to_hub(f"openmmlab/{model_name}") processor.push_to_hub(f"openmmlab/{model_name}") if __name__ == "__main__": parser 
= argparse.ArgumentParser() # Required parameters parser.add_argument( "--model_name", default="upernet-swin-tiny", type=str, choices=[f"upernet-swin-{size}" for size in ["tiny", "small", "base", "large"]], help="Name of the Swin + UperNet model you'd like to convert.", ) parser.add_argument( "--pytorch_dump_folder_path", default=None, type=str, help="Path to the output PyTorch model directory." ) parser.add_argument( "--push_to_hub", action="store_true", help="Whether or not to push the converted model to the 🤗 hub." ) args = parser.parse_args() convert_upernet_checkpoint(args.model_name, args.pytorch_dump_folder_path, args.push_to_hub)
27182812/ChatGLM-LLaMA-chinese-insturct
5,439
src/transformers/models/upernet/configuration_upernet.py
# coding=utf-8 # Copyright 2022 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ UperNet model configuration""" import copy from ...configuration_utils import PretrainedConfig from ...utils import logging from ..auto.configuration_auto import CONFIG_MAPPING logger = logging.get_logger(__name__) class UperNetConfig(PretrainedConfig): r""" This is the configuration class to store the configuration of an [`UperNetForSemanticSegmentation`]. It is used to instantiate an UperNet model according to the specified arguments, defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of the UperNet [openmmlab/upernet-convnext-tiny](https://huggingface.co/openmmlab/upernet-convnext-tiny) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Args: backbone_config (`PretrainedConfig` or `dict`, *optional*, defaults to `ResNetConfig()`): The configuration of the backbone model. hidden_size (`int`, *optional*, defaults to 512): The number of hidden units in the convolutional layers. initializer_range (`float`, *optional*, defaults to 0.02): The standard deviation of the truncated_normal_initializer for initializing all weight matrices. 
pool_scales (`Tuple[int]`, *optional*, defaults to `[1, 2, 3, 6]`): Pooling scales used in Pooling Pyramid Module applied on the last feature map. use_auxiliary_head (`bool`, *optional*, defaults to `True`): Whether to use an auxiliary head during training. auxiliary_loss_weight (`float`, *optional*, defaults to 0.4): Weight of the cross-entropy loss of the auxiliary head. auxiliary_channels (`int`, *optional*, defaults to 256): Number of channels to use in the auxiliary head. auxiliary_num_convs (`int`, *optional*, defaults to 1): Number of convolutional layers to use in the auxiliary head. auxiliary_concat_input (`bool`, *optional*, defaults to `False`): Whether to concatenate the output of the auxiliary head with the input before the classification layer. loss_ignore_index (`int`, *optional*, defaults to 255): The index that is ignored by the loss function. Examples: ```python >>> from transformers import UperNetConfig, UperNetForSemanticSegmentation >>> # Initializing a configuration >>> configuration = UperNetConfig() >>> # Initializing a model (with random weights) from the configuration >>> model = UperNetForSemanticSegmentation(configuration) >>> # Accessing the model configuration >>> configuration = model.config ```""" model_type = "upernet" def __init__( self, backbone_config=None, hidden_size=512, initializer_range=0.02, pool_scales=[1, 2, 3, 6], use_auxiliary_head=True, auxiliary_loss_weight=0.4, auxiliary_in_channels=384, auxiliary_channels=256, auxiliary_num_convs=1, auxiliary_concat_input=False, loss_ignore_index=255, **kwargs, ): super().__init__(**kwargs) if backbone_config is None: logger.info("`backbone_config` is `None`. 
Initializing the config with the default `ResNet` backbone.") backbone_config = CONFIG_MAPPING["resnet"](out_features=["stage1", "stage2", "stage3", "stage4"]) elif isinstance(backbone_config, dict): backbone_model_type = backbone_config.get("model_type") config_class = CONFIG_MAPPING[backbone_model_type] backbone_config = config_class.from_dict(backbone_config) self.backbone_config = backbone_config self.hidden_size = hidden_size self.initializer_range = initializer_range self.pool_scales = pool_scales self.use_auxiliary_head = use_auxiliary_head self.auxiliary_loss_weight = auxiliary_loss_weight self.auxiliary_in_channels = auxiliary_in_channels self.auxiliary_channels = auxiliary_channels self.auxiliary_num_convs = auxiliary_num_convs self.auxiliary_concat_input = auxiliary_concat_input self.loss_ignore_index = loss_ignore_index def to_dict(self): """ Serializes this instance to a Python dictionary. Override the default [`~PretrainedConfig.to_dict`]. Returns: `Dict[str, any]`: Dictionary of all the attributes that make up this configuration instance, """ output = copy.deepcopy(self.__dict__) output["backbone_config"] = self.backbone_config.to_dict() output["model_type"] = self.__class__.model_type return output
27182812/ChatGLM-LLaMA-chinese-insturct
1,847
src/transformers/models/mbart50/__init__.py
# Copyright 2020 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_sentencepiece_available, is_tokenizers_available _import_structure = {} try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["tokenization_mbart50"] = ["MBart50Tokenizer"] try: if not is_tokenizers_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["tokenization_mbart50_fast"] = ["MBart50TokenizerFast"] if TYPE_CHECKING: try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_mbart50 import MBart50Tokenizer try: if not is_tokenizers_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_mbart50_fast import MBart50TokenizerFast else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
16,647
src/transformers/models/mbart50/tokenization_mbart50.py
# coding=utf-8 # Copyright 2021 The Facebook AI Research Team Authors and The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import AddedToken, BatchEncoding, PreTrainedTokenizer from ...utils import logging logger = logging.get_logger(__name__) SPIECE_UNDERLINE = "▁" VOCAB_FILES_NAMES = {"vocab_file": "sentencepiece.bpe.model"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "facebook/mbart-large-50-one-to-many-mmt": ( "https://huggingface.co/facebook/mbart-large-50-one-to-many-mmt/resolve/main/sentencepiece.bpe.model" ), } } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "facebook/mbart-large-50-one-to-many-mmt": 1024, } # fmt: off FAIRSEQ_LANGUAGE_CODES = ["ar_AR", "cs_CZ", "de_DE", "en_XX", "es_XX", "et_EE", "fi_FI", "fr_XX", "gu_IN", "hi_IN", "it_IT", "ja_XX", "kk_KZ", "ko_KR", "lt_LT", "lv_LV", "my_MM", "ne_NP", "nl_XX", "ro_RO", "ru_RU", "si_LK", "tr_TR", "vi_VN", "zh_CN", "af_ZA", "az_AZ", "bn_IN", "fa_IR", "he_IL", "hr_HR", "id_ID", "ka_GE", "km_KH", "mk_MK", "ml_IN", "mn_MN", "mr_IN", "pl_PL", "ps_AF", "pt_XX", "sv_SE", "sw_KE", "ta_IN", "te_IN", "th_TH", "tl_XX", "uk_UA", "ur_PK", "xh_ZA", "gl_ES", "sl_SI"] # fmt: on class MBart50Tokenizer(PreTrainedTokenizer): """ Construct a MBart50 tokenizer. Based on [SentencePiece](https://github.com/google/sentencepiece). 
This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: vocab_file (`str`): Path to the vocabulary file. src_lang (`str`, *optional*): A string representing the source language. tgt_lang (`str`, *optional*): A string representing the target language. eos_token (`str`, *optional*, defaults to `"</s>"`): The end of sequence token. sep_token (`str`, *optional*, defaults to `"</s>"`): The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for sequence classification or for a text and a question for question answering. It is also used as the last token of a sequence built with special tokens. cls_token (`str`, *optional*, defaults to `"<s>"`): The classifier token which is used when doing sequence classification (classification of the whole sequence instead of per-token classification). It is the first token of the sequence when built with special tokens. unk_token (`str`, *optional*, defaults to `"<unk>"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. pad_token (`str`, *optional*, defaults to `"<pad>"`): The token used for padding, for example when batching sequences of different lengths. mask_token (`str`, *optional*, defaults to `"<mask>"`): The token used for masking values. This is the token used when training this model with masked language modeling. This is the token which the model will try to predict. sp_model_kwargs (`dict`, *optional*): Will be passed to the `SentencePieceProcessor.__init__()` method. The [Python wrapper for SentencePiece](https://github.com/google/sentencepiece/tree/master/python) can be used, among other things, to set: - `enable_sampling`: Enable subword regularization. - `nbest_size`: Sampling parameters for unigram. Invalid for BPE-Dropout. 
- `nbest_size = {0,1}`: No sampling is performed. - `nbest_size > 1`: samples from the nbest_size results. - `nbest_size < 0`: assuming that nbest_size is infinite and samples from the all hypothesis (lattice) using forward-filtering-and-backward-sampling algorithm. - `alpha`: Smoothing parameter for unigram sampling, and dropout probability of merge operations for BPE-dropout. Examples: ```python >>> from transformers import MBart50Tokenizer >>> tokenizer = MBart50Tokenizer.from_pretrained("facebook/mbart-large-50", src_lang="en_XX", tgt_lang="ro_RO") >>> src_text = " UN Chief Says There Is No Military Solution in Syria" >>> tgt_text = "Şeful ONU declară că nu există o soluţie militară în Siria" >>> model_inputs = tokenizer(src_text, text_target=tgt_text, return_tensors="pt") >>> # model(**model_inputs) should work ```""" vocab_files_names = VOCAB_FILES_NAMES max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP model_input_names = ["input_ids", "attention_mask"] prefix_tokens: List[int] = [] suffix_tokens: List[int] = [] def __init__( self, vocab_file, src_lang=None, tgt_lang=None, eos_token="</s>", sep_token="</s>", cls_token="<s>", unk_token="<unk>", pad_token="<pad>", mask_token="<mask>", sp_model_kwargs: Optional[Dict[str, Any]] = None, **kwargs, ) -> None: # Mask token behave like a normal word, i.e. 
include the space before it mask_token = AddedToken(mask_token, lstrip=True, rstrip=False) if isinstance(mask_token, str) else mask_token self.sp_model_kwargs = {} if sp_model_kwargs is None else sp_model_kwargs kwargs["additional_special_tokens"] = kwargs.get("additional_special_tokens", []) kwargs["additional_special_tokens"] += [ code for code in FAIRSEQ_LANGUAGE_CODES if code not in kwargs["additional_special_tokens"] ] super().__init__( src_lang=src_lang, tgt_lang=tgt_lang, eos_token=eos_token, unk_token=unk_token, sep_token=sep_token, cls_token=cls_token, pad_token=pad_token, mask_token=mask_token, sp_model_kwargs=self.sp_model_kwargs, **kwargs, ) self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs) self.sp_model.Load(str(vocab_file)) self.vocab_file = vocab_file # Original fairseq vocab and spm vocab must be "aligned": # Vocab | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 # -------- | ------- | ------- | ------ | ------- | --- | --- | --- | ----- | ----- | ---- # fairseq | '<s>' | '<pad>' | '</s>' | '<unk>' | ',' | '.' | '▁' | 's' | '▁de' | '-' # spm | '<unk>' | '<s>' | '</s>' | ',' | '.' 
| '▁' | 's' | '▁de' | '-' | '▁a' # Mimic fairseq token-to-id alignment for the first 4 token self.fairseq_tokens_to_ids = {"<s>": 0, "<pad>": 1, "</s>": 2, "<unk>": 3} # The first "real" token "," has position 4 in the original fairseq vocab and position 3 in the spm vocab self.fairseq_offset = 1 self.sp_model_size = len(self.sp_model) self.lang_code_to_id = { code: self.sp_model_size + i + self.fairseq_offset for i, code in enumerate(FAIRSEQ_LANGUAGE_CODES) } self.id_to_lang_code = {v: k for k, v in self.lang_code_to_id.items()} self.fairseq_tokens_to_ids["<mask>"] = len(self.sp_model) + len(self.lang_code_to_id) + self.fairseq_offset self.fairseq_tokens_to_ids.update(self.lang_code_to_id) self.fairseq_ids_to_tokens = {v: k for k, v in self.fairseq_tokens_to_ids.items()} self._src_lang = src_lang if src_lang is not None else "en_XX" self.cur_lang_code_id = self.lang_code_to_id[self._src_lang] self.tgt_lang = tgt_lang self.set_src_lang_special_tokens(self._src_lang) @property def vocab_size(self) -> int: return len(self.sp_model) + len(self.lang_code_to_id) + self.fairseq_offset + 1 # Plus 1 for the mask token @property def src_lang(self) -> str: return self._src_lang @src_lang.setter def src_lang(self, new_src_lang: str) -> None: self._src_lang = new_src_lang self.set_src_lang_special_tokens(self._src_lang) def __getstate__(self) -> Dict: state = self.__dict__.copy() state["sp_model"] = None return state def __setstate__(self, d: Dict) -> None: self.__dict__ = d # for backward compatibility if not hasattr(self, "sp_model_kwargs"): self.sp_model_kwargs = {} self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs) self.sp_model.Load(self.vocab_file) def get_vocab(self) -> Dict: vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)} vocab.update(self.added_tokens_encoder) return vocab def _tokenize(self, text: str) -> List[str]: return self.sp_model.encode(text, out_type=str) def _convert_token_to_id(self, token: str) -> int: """Converts 
a token (str) in an id using the vocab.""" if token in self.fairseq_tokens_to_ids: return self.fairseq_tokens_to_ids[token] spm_id = self.sp_model.PieceToId(token) # Need to return unknown token if the SP model returned 0 return spm_id + self.fairseq_offset if spm_id else self.unk_token_id def _convert_id_to_token(self, index: int) -> str: """Converts an index (integer) in a token (str) using the vocab.""" if index in self.fairseq_ids_to_tokens: return self.fairseq_ids_to_tokens[index] return self.sp_model.IdToPiece(index - self.fairseq_offset) def convert_tokens_to_string(self, tokens): """Converts a sequence of tokens (string) in a single string.""" current_sub_tokens = [] out_string = "" prev_is_special = False for token in tokens: # make sure that special tokens are not decoded using sentencepiece model if token in self.all_special_tokens: if not prev_is_special: out_string += " " out_string += self.sp_model.decode(current_sub_tokens) + token prev_is_special = True current_sub_tokens = [] else: current_sub_tokens.append(token) prev_is_special = False out_string += self.sp_model.decode(current_sub_tokens) return out_string.strip() def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: if not os.path.isdir(save_directory): logger.error(f"Vocabulary path ({save_directory}) should be a directory") return out_vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file) and os.path.isfile(self.vocab_file): copyfile(self.vocab_file, out_vocab_file) elif not os.path.isfile(self.vocab_file): with open(out_vocab_file, "wb") as fi: content_spiece_model = self.sp_model.serialized_model_proto() fi.write(content_spiece_model) return (out_vocab_file,) def get_special_tokens_mask( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False ) 
-> List[int]: """ Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer `prepare_for_model` method. Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. already_has_special_tokens (`bool`, *optional*, defaults to `False`): Whether or not the token list is already formatted with special tokens for the model. Returns: `List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token. """ if already_has_special_tokens: return super().get_special_tokens_mask( token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True ) prefix_ones = [1] * len(self.prefix_tokens) suffix_ones = [1] * len(self.suffix_tokens) if token_ids_1 is None: return prefix_ones + ([0] * len(token_ids_0)) + suffix_ones return prefix_ones + ([0] * len(token_ids_0)) + ([0] * len(token_ids_1)) + suffix_ones def build_inputs_with_special_tokens( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. An MBART-50 sequence has the following format, where `X` represents the sequence: - `input_ids` (for encoder) `[src_lang_code] X [eos]` - `labels`: (for decoder) `[tgt_lang_code] X [eos]` BOS is never used. Pairs of sequences are not the expected use case, but they will be handled without a separator. Args: token_ids_0 (`List[int]`): List of IDs to which the special tokens will be added. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. 
""" if token_ids_1 is None: return self.prefix_tokens + token_ids_0 + self.suffix_tokens # We don't expect to process pairs, but leave the pair logic for API consistency return self.prefix_tokens + token_ids_0 + token_ids_1 + self.suffix_tokens def _build_translation_inputs( self, raw_inputs, return_tensors: str, src_lang: Optional[str], tgt_lang: Optional[str], **extra_kwargs ): """Used by translation pipeline, to prepare inputs for the generate function""" if src_lang is None or tgt_lang is None: raise ValueError("Translation requires a `src_lang` and a `tgt_lang` for this model") self.src_lang = src_lang inputs = self(raw_inputs, add_special_tokens=True, return_tensors=return_tensors, **extra_kwargs) tgt_lang_id = self.convert_tokens_to_ids(tgt_lang) inputs["forced_bos_token_id"] = tgt_lang_id return inputs def prepare_seq2seq_batch( self, src_texts: List[str], src_lang: str = "en_XX", tgt_texts: Optional[List[str]] = None, tgt_lang: str = "ro_RO", **kwargs, ) -> BatchEncoding: self.src_lang = src_lang self.tgt_lang = tgt_lang return super().prepare_seq2seq_batch(src_texts, tgt_texts, **kwargs) def _switch_to_input_mode(self): return self.set_src_lang_special_tokens(self.src_lang) def _switch_to_target_mode(self): return self.set_tgt_lang_special_tokens(self.tgt_lang) def set_src_lang_special_tokens(self, src_lang: str) -> None: """Reset the special tokens to the source lang setting. prefix=[src_lang_code] and suffix=[eos].""" self.cur_lang_code_id = self.lang_code_to_id[src_lang] self.prefix_tokens = [self.cur_lang_code_id] self.suffix_tokens = [self.eos_token_id] def set_tgt_lang_special_tokens(self, tgt_lang: str) -> None: """Reset the special tokens to the target language setting. prefix=[tgt_lang_code] and suffix=[eos].""" self.cur_lang_code_id = self.lang_code_to_id[tgt_lang] self.prefix_tokens = [self.cur_lang_code_id] self.suffix_tokens = [self.eos_token_id]
27182812/ChatGLM-LLaMA-chinese-insturct
12,192
src/transformers/models/mbart50/tokenization_mbart50_fast.py
# coding=utf-8 # Copyright 2021 The Facebook AI Research Team Authors and The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from shutil import copyfile from typing import List, Optional, Tuple from tokenizers import processors from ...tokenization_utils import AddedToken, BatchEncoding from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import is_sentencepiece_available, logging if is_sentencepiece_available(): from .tokenization_mbart50 import MBart50Tokenizer else: MBart50Tokenizer = None logger = logging.get_logger(__name__) VOCAB_FILES_NAMES = {"vocab_file": "sentencepiece.bpe.model", "tokenizer_file": "tokenizer.json"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "facebook/mbart-large-50-one-to-many-mmt": ( "https://huggingface.co/facebook/mbart-large-50-one-to-many-mmt/resolve/main/sentencepiece.bpe.model" ), }, "tokenizer_file": { "facebook/mbart-large-50-one-to-many-mmt": ( "https://huggingface.co/facebook/mbart-large-50-one-to-many-mmt/resolve/main/tokenizer.json" ), }, } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "facebook/mbart-large-50-one-to-many-mmt": 1024, } # fmt: off FAIRSEQ_LANGUAGE_CODES = ["ar_AR", "cs_CZ", "de_DE", "en_XX", "es_XX", "et_EE", "fi_FI", "fr_XX", "gu_IN", "hi_IN", "it_IT", "ja_XX", "kk_KZ", "ko_KR", "lt_LT", "lv_LV", "my_MM", "ne_NP", "nl_XX", "ro_RO", "ru_RU", "si_LK", "tr_TR", "vi_VN", "zh_CN", "af_ZA", "az_AZ", "bn_IN", "fa_IR", "he_IL", "hr_HR", "id_ID", "ka_GE", "km_KH", "mk_MK", 
"ml_IN", "mn_MN", "mr_IN", "pl_PL", "ps_AF", "pt_XX", "sv_SE", "sw_KE", "ta_IN", "te_IN", "th_TH", "tl_XX", "uk_UA", "ur_PK", "xh_ZA", "gl_ES", "sl_SI"] # fmt: on class MBart50TokenizerFast(PreTrainedTokenizerFast): """ Construct a "fast" MBART tokenizer for mBART-50 (backed by HuggingFace's *tokenizers* library). Based on [BPE](https://huggingface.co/docs/tokenizers/python/latest/components.html?highlight=BPE#models). This tokenizer inherits from [`PreTrainedTokenizerFast`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: vocab_file (`str`): Path to the vocabulary file. src_lang (`str`, *optional*): A string representing the source language. tgt_lang (`str`, *optional*): A string representing the target language. eos_token (`str`, *optional*, defaults to `"</s>"`): The end of sequence token. sep_token (`str`, *optional*, defaults to `"</s>"`): The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for sequence classification or for a text and a question for question answering. It is also used as the last token of a sequence built with special tokens. cls_token (`str`, *optional*, defaults to `"<s>"`): The classifier token which is used when doing sequence classification (classification of the whole sequence instead of per-token classification). It is the first token of the sequence when built with special tokens. unk_token (`str`, *optional*, defaults to `"<unk>"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. pad_token (`str`, *optional*, defaults to `"<pad>"`): The token used for padding, for example when batching sequences of different lengths. mask_token (`str`, *optional*, defaults to `"<mask>"`): The token used for masking values. This is the token used when training this model with masked language modeling. 
This is the token which the model will try to predict. Examples: ```python >>> from transformers import MBart50TokenizerFast >>> tokenizer = MBart50TokenizerFast.from_pretrained("facebook/mbart-large-50", src_lang="en_XX", tgt_lang="ro_RO") >>> src_text = " UN Chief Says There Is No Military Solution in Syria" >>> tgt_text = "Şeful ONU declară că nu există o soluţie militară în Siria" >>> model_inputs = tokenizer(src_text, text_target=tgt_text, return_tensors="pt") >>> # model(**model_inputs) should work ```""" vocab_files_names = VOCAB_FILES_NAMES max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP model_input_names = ["input_ids", "attention_mask"] slow_tokenizer_class = MBart50Tokenizer prefix_tokens: List[int] = [] suffix_tokens: List[int] = [] def __init__( self, vocab_file=None, src_lang=None, tgt_lang=None, tokenizer_file=None, eos_token="</s>", sep_token="</s>", cls_token="<s>", unk_token="<unk>", pad_token="<pad>", mask_token="<mask>", **kwargs, ): # Mask token behave like a normal word, i.e. 
include the space before it mask_token = AddedToken(mask_token, lstrip=True, rstrip=False) if isinstance(mask_token, str) else mask_token kwargs["additional_special_tokens"] = kwargs.get("additional_special_tokens", []) kwargs["additional_special_tokens"] += [ code for code in FAIRSEQ_LANGUAGE_CODES if code not in kwargs["additional_special_tokens"] ] super().__init__( vocab_file, src_lang=src_lang, tgt_lang=tgt_lang, tokenizer_file=tokenizer_file, eos_token=eos_token, sep_token=sep_token, cls_token=cls_token, unk_token=unk_token, pad_token=pad_token, mask_token=mask_token, **kwargs, ) self.vocab_file = vocab_file self.can_save_slow_tokenizer = False if not self.vocab_file else True self.lang_code_to_id = { lang_code: self.convert_tokens_to_ids(lang_code) for lang_code in FAIRSEQ_LANGUAGE_CODES } self._src_lang = src_lang if src_lang is not None else "en_XX" self.tgt_lang = tgt_lang self.cur_lang_code_id = self.lang_code_to_id[self._src_lang] self.set_src_lang_special_tokens(self._src_lang) @property def src_lang(self) -> str: return self._src_lang @src_lang.setter def src_lang(self, new_src_lang: str) -> None: self._src_lang = new_src_lang self.set_src_lang_special_tokens(self._src_lang) def build_inputs_with_special_tokens( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. The special tokens depend on calling set_lang. An MBART-50 sequence has the following format, where `X` represents the sequence: - `input_ids` (for encoder) `[src_lang_code] X [eos]` - `labels`: (for decoder) `[tgt_lang_code] X [eos]` BOS is never used. Pairs of sequences are not the expected use case, but they will be handled without a separator. Args: token_ids_0 (`List[int]`): List of IDs to which the special tokens will be added. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. 
Returns: `List[int]`: list of [input IDs](../glossary#input-ids) with the appropriate special tokens. """ if token_ids_1 is None: return self.prefix_tokens + token_ids_0 + self.suffix_tokens # We don't expect to process pairs, but leave the pair logic for API consistency return self.prefix_tokens + token_ids_0 + token_ids_1 + self.suffix_tokens def prepare_seq2seq_batch( self, src_texts: List[str], src_lang: str = "en_XX", tgt_texts: Optional[List[str]] = None, tgt_lang: str = "ro_RO", **kwargs, ) -> BatchEncoding: self.src_lang = src_lang self.tgt_lang = tgt_lang return super().prepare_seq2seq_batch(src_texts, tgt_texts, **kwargs) def _switch_to_input_mode(self): return self.set_src_lang_special_tokens(self.src_lang) def _switch_to_target_mode(self): return self.set_tgt_lang_special_tokens(self.tgt_lang) def set_src_lang_special_tokens(self, src_lang: str) -> None: """Reset the special tokens to the source lang setting. prefix=[src_lang_code] and suffix=[eos].""" self.cur_lang_code_id = self.convert_tokens_to_ids(src_lang) self.prefix_tokens = [self.cur_lang_code_id] self.suffix_tokens = [self.eos_token_id] prefix_tokens_str = self.convert_ids_to_tokens(self.prefix_tokens) suffix_tokens_str = self.convert_ids_to_tokens(self.suffix_tokens) self._tokenizer.post_processor = processors.TemplateProcessing( single=prefix_tokens_str + ["$A"] + suffix_tokens_str, pair=prefix_tokens_str + ["$A", "$B"] + suffix_tokens_str, special_tokens=list(zip(prefix_tokens_str + suffix_tokens_str, self.prefix_tokens + self.suffix_tokens)), ) def set_tgt_lang_special_tokens(self, tgt_lang: str) -> None: """Reset the special tokens to the target language setting. 
prefix=[src_lang_code] and suffix=[eos].""" self.cur_lang_code_id = self.convert_tokens_to_ids(tgt_lang) self.prefix_tokens = [self.cur_lang_code_id] self.suffix_tokens = [self.eos_token_id] prefix_tokens_str = self.convert_ids_to_tokens(self.prefix_tokens) suffix_tokens_str = self.convert_ids_to_tokens(self.suffix_tokens) self._tokenizer.post_processor = processors.TemplateProcessing( single=prefix_tokens_str + ["$A"] + suffix_tokens_str, pair=prefix_tokens_str + ["$A", "$B"] + suffix_tokens_str, special_tokens=list(zip(prefix_tokens_str + suffix_tokens_str, self.prefix_tokens + self.suffix_tokens)), ) def _build_translation_inputs( self, raw_inputs, return_tensors: str, src_lang: Optional[str], tgt_lang: Optional[str], **extra_kwargs ): """Used by translation pipeline, to prepare inputs for the generate function""" if src_lang is None or tgt_lang is None: raise ValueError("Translation requires a `src_lang` and a `tgt_lang` for this model") self.src_lang = src_lang inputs = self(raw_inputs, add_special_tokens=True, return_tensors=return_tensors, **extra_kwargs) tgt_lang_id = self.convert_tokens_to_ids(tgt_lang) inputs["forced_bos_token_id"] = tgt_lang_id return inputs def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: if not self.can_save_slow_tokenizer: raise ValueError( "Your fast tokenizer does not have the necessary information to save the vocabulary for a slow " "tokenizer." ) if not os.path.isdir(save_directory): logger.error(f"Vocabulary path ({save_directory}) should be a directory") return out_vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file): copyfile(self.vocab_file, out_vocab_file) return (out_vocab_file,)
27182812/ChatGLM-LLaMA-chinese-insturct
8,437
src/transformers/models/lxmert/tokenization_lxmert_fast.py
# coding=utf-8 # Copyright 2020 The Google AI Team, Stanford University and The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json from typing import List, Optional, Tuple from tokenizers import normalizers from ...tokenization_utils_fast import PreTrainedTokenizerFast from .tokenization_lxmert import LxmertTokenizer VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt", "tokenizer_file": "tokenizer.json"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "unc-nlp/lxmert-base-uncased": "https://huggingface.co/unc-nlp/lxmert-base-uncased/resolve/main/vocab.txt", }, "tokenizer_file": { "unc-nlp/lxmert-base-uncased": ( "https://huggingface.co/unc-nlp/lxmert-base-uncased/resolve/main/tokenizer.json" ), }, } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "unc-nlp/lxmert-base-uncased": 512, } PRETRAINED_INIT_CONFIGURATION = { "unc-nlp/lxmert-base-uncased": {"do_lower_case": True}, } # Copied from transformers.models.bert.tokenization_bert_fast.BertTokenizerFast with bert-base-cased->unc-nlp/lxmert-base-uncased, BERT->Lxmert, Bert->Lxmert class LxmertTokenizerFast(PreTrainedTokenizerFast): r""" Construct a "fast" Lxmert tokenizer (backed by HuggingFace's *tokenizers* library). Based on WordPiece. This tokenizer inherits from [`PreTrainedTokenizerFast`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: vocab_file (`str`): File containing the vocabulary. 
do_lower_case (`bool`, *optional*, defaults to `True`): Whether or not to lowercase the input when tokenizing. unk_token (`str`, *optional*, defaults to `"[UNK]"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. sep_token (`str`, *optional*, defaults to `"[SEP]"`): The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for sequence classification or for a text and a question for question answering. It is also used as the last token of a sequence built with special tokens. pad_token (`str`, *optional*, defaults to `"[PAD]"`): The token used for padding, for example when batching sequences of different lengths. cls_token (`str`, *optional*, defaults to `"[CLS]"`): The classifier token which is used when doing sequence classification (classification of the whole sequence instead of per-token classification). It is the first token of the sequence when built with special tokens. mask_token (`str`, *optional*, defaults to `"[MASK]"`): The token used for masking values. This is the token used when training this model with masked language modeling. This is the token which the model will try to predict. clean_text (`bool`, *optional*, defaults to `True`): Whether or not to clean the text before tokenization by removing any control characters and replacing all whitespaces by the classic one. tokenize_chinese_chars (`bool`, *optional*, defaults to `True`): Whether or not to tokenize Chinese characters. This should likely be deactivated for Japanese (see [this issue](https://github.com/huggingface/transformers/issues/328)). strip_accents (`bool`, *optional*): Whether or not to strip all accents. If this option is not specified, then it will be determined by the value for `lowercase` (as in the original Lxmert). wordpieces_prefix (`str`, *optional*, defaults to `"##"`): The prefix for subwords. 
""" vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES slow_tokenizer_class = LxmertTokenizer def __init__( self, vocab_file=None, tokenizer_file=None, do_lower_case=True, unk_token="[UNK]", sep_token="[SEP]", pad_token="[PAD]", cls_token="[CLS]", mask_token="[MASK]", tokenize_chinese_chars=True, strip_accents=None, **kwargs, ): super().__init__( vocab_file, tokenizer_file=tokenizer_file, do_lower_case=do_lower_case, unk_token=unk_token, sep_token=sep_token, pad_token=pad_token, cls_token=cls_token, mask_token=mask_token, tokenize_chinese_chars=tokenize_chinese_chars, strip_accents=strip_accents, **kwargs, ) normalizer_state = json.loads(self.backend_tokenizer.normalizer.__getstate__()) if ( normalizer_state.get("lowercase", do_lower_case) != do_lower_case or normalizer_state.get("strip_accents", strip_accents) != strip_accents or normalizer_state.get("handle_chinese_chars", tokenize_chinese_chars) != tokenize_chinese_chars ): normalizer_class = getattr(normalizers, normalizer_state.pop("type")) normalizer_state["lowercase"] = do_lower_case normalizer_state["strip_accents"] = strip_accents normalizer_state["handle_chinese_chars"] = tokenize_chinese_chars self.backend_tokenizer.normalizer = normalizer_class(**normalizer_state) self.do_lower_case = do_lower_case def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None): """ Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. A Lxmert sequence has the following format: - single sequence: `[CLS] X [SEP]` - pair of sequences: `[CLS] A [SEP] B [SEP]` Args: token_ids_0 (`List[int]`): List of IDs to which the special tokens will be added. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. 
Returns: `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. """ output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id] if token_ids_1: output += token_ids_1 + [self.sep_token_id] return output def create_token_type_ids_from_sequences( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Create a mask from the two sequences passed to be used in a sequence-pair classification task. A Lxmert sequence pair mask has the following format: ``` 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 | first sequence | second sequence | ``` If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s). Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s). """ sep = [self.sep_token_id] cls = [self.cls_token_id] if token_ids_1 is None: return len(cls + token_ids_0 + sep) * [0] return len(cls + token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1] def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: files = self._tokenizer.model.save(save_directory, name=filename_prefix) return tuple(files)
27182812/ChatGLM-LLaMA-chinese-insturct
64,261
src/transformers/models/lxmert/modeling_tf_lxmert.py
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors, The HuggingFace Inc. team, and the # Lxmert Authors. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ TF 2.0 LXMERT model.""" import warnings from dataclasses import dataclass from typing import Dict, Optional, Tuple, Union import numpy as np import tensorflow as tf from ...activations_tf import get_tf_activation from ...modeling_tf_utils import ( TFModelInputType, TFPreTrainedModel, get_initializer, keras_serializable, shape_list, unpack_inputs, ) from ...tf_utils import stable_softmax from ...utils import ( ModelOutput, add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_lxmert import LxmertConfig logger = logging.get_logger(__name__) _CHECKPOINT_FOR_DOC = "unc-nlp/lxmert-base-uncased" _CONFIG_FOR_DOC = "LxmertConfig" TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST = [ "unc-nlp/lxmert-base-uncased", ] @dataclass class TFLxmertModelOutput(ModelOutput): """ Lxmert's outputs that contain the last hidden states, pooled outputs, and attention probabilities for the language, visual, and, cross-modality encoders. (note: the visual encoder in Lxmert is referred to as the "relation-ship" encoder") Args: language_output (`tf.Tensor` of shape `(batch_size, sequence_length, hidden_size)`): Sequence of hidden-states at the output of the last layer of the language encoder. 
vision_output (`tf.Tensor` of shape `(batch_size, sequence_length, hidden_size)`): Sequence of hidden-states at the output of the last layer of the visual encoder. pooled_output (`tf.Tensor` of shape `(batch_size, hidden_size)`): Last layer hidden-state of the first token of the sequence (classification, CLS, token) further processed by a Linear layer and a Tanh activation function. The Linear language_hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for input features + one for the output of each cross-modality layer) of shape `(batch_size, sequence_length, hidden_size)`. vision_hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for input features + one for the output of each cross-modality layer) of shape `(batch_size, sequence_length, hidden_size)`. language_attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. vision_attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. 
cross_encoder_attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ language_output: Optional[tf.Tensor] = None vision_output: Optional[tf.Tensor] = None pooled_output: Optional[tf.Tensor] = None language_hidden_states: Optional[Tuple[tf.Tensor]] = None vision_hidden_states: Optional[Tuple[tf.Tensor]] = None language_attentions: Optional[Tuple[tf.Tensor]] = None vision_attentions: Optional[Tuple[tf.Tensor]] = None cross_encoder_attentions: Optional[Tuple[tf.Tensor]] = None @dataclass class TFLxmertForPreTrainingOutput(ModelOutput): """ Output type of [`LxmertForPreTraining`]. Args: loss (*optional*, returned when `labels` is provided, `tf.Tensor` of shape `(1,)`): Total loss as the sum of the masked language modeling loss and the next sequence prediction (classification) loss. prediction_logits (`tf.Tensor` of shape `(batch_size, sequence_length, config.vocab_size)`): Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax). cross_relationship_score: (`tf.Tensor` of shape `(batch_size, 2)`): Prediction scores of the textual matching objective (classification) head (scores of True/False continuation before SoftMax). question_answering_score: (`tf.Tensor` of shape `(batch_size, n_qa_answers)`): Prediction scores of question answering objective (classification). language_hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for input features + one for the output of each cross-modality layer) of shape `(batch_size, sequence_length, hidden_size)`. 
vision_hidden_states (`tuple(tf.Tensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `tf.Tensor` (one for input features + one for the output of each cross-modality layer) of shape `(batch_size, sequence_length, hidden_size)`. language_attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. vision_attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. cross_encoder_attentions (`tuple(tf.Tensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `tf.Tensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. 
""" loss: Optional[tf.Tensor] = None prediction_logits: Optional[tf.Tensor] = None cross_relationship_score: Optional[tf.Tensor] = None question_answering_score: Optional[tf.Tensor] = None language_hidden_states: Optional[Tuple[tf.Tensor]] = None vision_hidden_states: Optional[Tuple[tf.Tensor]] = None language_attentions: Optional[Tuple[tf.Tensor]] = None vision_attentions: Optional[Tuple[tf.Tensor]] = None cross_encoder_attentions: Optional[Tuple[tf.Tensor]] = None class TFLxmertVisualFeatureEncoder(tf.keras.layers.Layer): def __init__(self, config, **kwargs): super().__init__(**kwargs) # Object feature encoding self.visn_fc = tf.keras.layers.Dense( config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), name="visn_fc", ) self.visn_layer_norm = tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name="visn_layer_norm" ) # Box position encoding self.box_fc = tf.keras.layers.Dense( config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), name="box_fc", ) self.box_layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="box_layer_norm") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob) def call(self, visn_input, training=False): feats, boxes = visn_input x = self.visn_fc(feats) x = self.visn_layer_norm(x) y = self.box_fc(boxes) y = self.box_layer_norm(y) output = (x + y) / 2 output = self.dropout(output, training=training) return output class TFLxmertEmbeddings(tf.keras.layers.Layer): """Construct the embeddings from word, position and token_type embeddings.""" def __init__(self, config, **kwargs): super().__init__(**kwargs) self.config = config self.hidden_size = config.hidden_size self.max_position_embeddings = config.max_position_embeddings self.initializer_range = config.initializer_range self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm") self.dropout = 
tf.keras.layers.Dropout(rate=config.hidden_dropout_prob) def build(self, input_shape): with tf.name_scope("word_embeddings"): self.weight = self.add_weight( name="weight", shape=[self.config.vocab_size, self.hidden_size], initializer=get_initializer(initializer_range=self.initializer_range), ) with tf.name_scope("token_type_embeddings"): self.token_type_embeddings = self.add_weight( name="embeddings", shape=[self.config.type_vocab_size, self.hidden_size], initializer=get_initializer(initializer_range=self.initializer_range), ) with tf.name_scope("position_embeddings"): self.position_embeddings = self.add_weight( name="embeddings", shape=[self.max_position_embeddings, self.hidden_size], initializer=get_initializer(initializer_range=self.initializer_range), ) super().build(input_shape) def call(self, input_ids=None, token_type_ids=None, inputs_embeds=None, training=False): """ Applies embedding based on inputs tensor. Returns: final_embeddings (`tf.Tensor`): output embedding tensor. """ assert not (input_ids is None and inputs_embeds is None) if input_ids is not None: # Note: tf.gather, on which the embedding layer is based, won't check positive out of bound # indices on GPU, returning zeros instead. This is a dangerous silent behavior. 
tf.debugging.assert_less( input_ids, tf.cast(self.config.vocab_size, dtype=input_ids.dtype), message=( "input_ids must be smaller than the embedding layer's input dimension (got" f" {tf.math.reduce_max(input_ids)} >= {self.config.vocab_size})" ), ) inputs_embeds = tf.gather(params=self.weight, indices=input_ids) input_shape = shape_list(inputs_embeds)[:-1] if token_type_ids is None: token_type_ids = tf.fill(dims=input_shape, value=0) position_ids = tf.expand_dims(tf.range(start=0, limit=input_shape[-1]), axis=0) position_embeds = tf.gather(params=self.position_embeddings, indices=position_ids) token_type_embeds = tf.gather(params=self.token_type_embeddings, indices=token_type_ids) final_embeddings = inputs_embeds + position_embeds + token_type_embeds final_embeddings = self.LayerNorm(inputs=final_embeddings) final_embeddings = self.dropout(inputs=final_embeddings, training=training) return final_embeddings class TFLxmertAttention(tf.keras.layers.Layer): def __init__(self, config, **kwargs): super().__init__(**kwargs) if config.hidden_size % config.num_attention_heads != 0: raise ValueError( f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention " f"heads ({config.num_attention_heads}" ) self.num_attention_heads = config.num_attention_heads assert config.hidden_size % config.num_attention_heads == 0 self.attention_head_size = int(config.hidden_size / config.num_attention_heads) self.all_head_size = self.num_attention_heads * self.attention_head_size self.query = tf.keras.layers.Dense( self.all_head_size, kernel_initializer=get_initializer(config.initializer_range), name="query", ) self.key = tf.keras.layers.Dense( self.all_head_size, kernel_initializer=get_initializer(config.initializer_range), name="key", ) self.value = tf.keras.layers.Dense( self.all_head_size, kernel_initializer=get_initializer(config.initializer_range), name="value", ) self.dropout = tf.keras.layers.Dropout(config.attention_probs_dropout_prob) def 
transpose_for_scores(self, x, batch_size):
        # Reshape from [batch_size, seq_length, all_head_size] to [batch_size, seq_length, num_attention_heads, attention_head_size]
        x = tf.reshape(x, (batch_size, -1, self.num_attention_heads, self.attention_head_size))
        # Move the head axis before the sequence axis for batched matmul.
        return tf.transpose(x, perm=[0, 2, 1, 3])

    def call(self, hidden_states, context, attention_mask, output_attentions, training=False):
        batch_size = shape_list(hidden_states)[0]
        mixed_query_layer = self.query(hidden_states)
        mixed_key_layer = self.key(context)
        mixed_value_layer = self.value(context)

        query_layer = self.transpose_for_scores(mixed_query_layer, batch_size)
        key_layer = self.transpose_for_scores(mixed_key_layer, batch_size)
        value_layer = self.transpose_for_scores(mixed_value_layer, batch_size)

        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = tf.matmul(
            query_layer, key_layer, transpose_b=True
        )  # (batch size, num_heads, seq_len_q, seq_len_k)
        dk = tf.cast(shape_list(key_layer)[-1], dtype=attention_scores.dtype)  # scale attention_scores
        attention_scores = attention_scores / tf.math.sqrt(dk)

        if attention_mask is not None:
            # Apply the attention mask is (precomputed for all layers in TFLxmertModel call() function)
            # The mask is additive: 0 for kept positions, -10000 for masked ones.
            attention_mask = tf.cast(attention_mask, dtype=attention_scores.dtype)
            attention_scores = attention_scores + attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = stable_softmax(attention_scores, axis=-1)

        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs, training=training)
        context_layer = tf.matmul(attention_probs, value_layer)

        context_layer = tf.transpose(context_layer, perm=[0, 2, 1, 3])
        context_layer = tf.reshape(
            context_layer, (batch_size, -1, self.all_head_size)
        )  # (batch_size, seq_len_q, all_head_size)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)
        return outputs


class TFLxmertIntermediate(tf.keras.layers.Layer):
    """Feed-forward expansion: Dense to intermediate_size followed by the configured activation."""

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.dense = tf.keras.layers.Dense(
            config.intermediate_size,
            kernel_initializer=get_initializer(config.initializer_range),
            name="dense",
        )
        # config.hidden_act may be a string name or a callable.
        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = get_tf_activation(config.hidden_act)
        else:
            self.intermediate_act_fn = config.hidden_act

    def call(self, hidden_states):
        hidden_states = self.dense(hidden_states)
        hidden_states = self.intermediate_act_fn(hidden_states)
        return hidden_states


class TFLxmertOutput(tf.keras.layers.Layer):
    """Feed-forward projection back to hidden_size with dropout, residual add, and LayerNorm."""

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.dense = tf.keras.layers.Dense(
            config.hidden_size,
            kernel_initializer=get_initializer(config.initializer_range),
            name="dense",
        )
        self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
        self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)

    def call(self, hidden_states, input_tensor, training=False):
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states, training)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)
        return hidden_states


class TFLxmertAttentionOutput(tf.keras.layers.Layer):
    """Post-attention projection with dropout, residual add, and LayerNorm."""

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.dense = tf.keras.layers.Dense(
            config.hidden_size,
            kernel_initializer=get_initializer(config.initializer_range),
            name="dense",
        )
        self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
        self.dropout = 
tf.keras.layers.Dropout(config.hidden_dropout_prob)

    def call(self, hidden_states, input_tensor, training=False):
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)
        return hidden_states


class TFLxmertSelfAttentionLayer(tf.keras.layers.Layer):
    """Self-attention sublayer: attention over a single stream plus its output projection."""

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.self = TFLxmertAttention(config, name="self")
        self.attention_output = TFLxmertAttentionOutput(config, name="output")

    def call(self, input_tensor, attention_mask, output_attentions, training=False):
        # Self attention attends to itself, thus keys and queries are the same (input_tensor).
        self_output = self.self(input_tensor, input_tensor, attention_mask, output_attentions)
        if output_attentions:
            attention_probs = self_output[1]
        attention_output = self.attention_output(self_output[0], input_tensor)
        return (attention_output, attention_probs) if output_attentions else (attention_output,)


class TFLxmertCrossAttentionLayer(tf.keras.layers.Layer):
    """Cross-attention sublayer: queries come from input_tensor, keys/values from ctx_tensor."""

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.att = TFLxmertAttention(config, name="att")
        self.attention_output = TFLxmertAttentionOutput(config, name="output")

    def call(
        self,
        input_tensor,
        ctx_tensor,
        ctx_att_mask,
        output_attentions=False,
        training=False,
    ):
        output = self.att(input_tensor, ctx_tensor, ctx_att_mask, output_attentions, training=training)
        if output_attentions:
            attention_probs = output[1]
        attention_output = self.attention_output(output[0], input_tensor, training=training)
        outputs = (attention_output, attention_probs) if output_attentions else (attention_output,)
        return outputs


class TFLxmertLayer(tf.keras.layers.Layer):
    """A single single-modality transformer layer: self-attention + feed-forward."""

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.attention = TFLxmertSelfAttentionLayer(config, name="attention")
        self.intermediate = TFLxmertIntermediate(config, name="intermediate")
        self.transformer_output = TFLxmertOutput(config, 
name="output")

    def call(self, hidden_states, attention_mask, output_attentions, training=False):
        attention_outputs = self.attention(hidden_states, attention_mask, output_attentions, training=training)
        attention_output = attention_outputs[0]
        intermediate_output = self.intermediate(attention_output)
        layer_output = self.transformer_output(intermediate_output, attention_output, training=training)
        outputs = (layer_output,) + attention_outputs[1:]  # add attentions if we output them
        return outputs


class TFLxmertXLayer(tf.keras.layers.Layer):
    """Cross-modality layer: bidirectional cross-attention between the language and vision
    streams, followed by per-stream self-attention and feed-forward blocks."""

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.visual_attention = TFLxmertCrossAttentionLayer(config, name="visual_attention")

        # Self-attention Layers
        self.lang_self_att = TFLxmertSelfAttentionLayer(config, name="lang_self_att")
        self.visn_self_att = TFLxmertSelfAttentionLayer(config, name="visn_self_att")

        # Intermediate and Output Layers (FFNs)
        self.lang_inter = TFLxmertIntermediate(config, name="lang_inter")
        self.lang_output = TFLxmertOutput(config, name="lang_output")
        self.visn_inter = TFLxmertIntermediate(config, name="visn_inter")
        self.visn_output = TFLxmertOutput(config, name="visn_output")

    def cross_att(
        self,
        lang_input,
        lang_attention_mask,
        visn_input,
        visn_attention_mask,
        output_attentions,
        training=False,
    ):
        # Cross Attention
        # Keras saving and loading model *does not work* with the same inputs for two layers.
        # tf.identity copies work around Keras' restriction on feeding the same tensor
        # to one layer twice (see comment above).
        lang_attention_lang_input = tf.identity(lang_input)
        visn_attention_lang_input = tf.identity(lang_input)
        lang_attention_visn_input = tf.identity(visn_input)
        visn_attention_visn_input = tf.identity(visn_input)

        # Language queries attend over vision, and vice versa, through the SAME shared layer.
        lang_att_output = self.visual_attention(
            lang_attention_lang_input,
            lang_attention_visn_input,
            visn_attention_mask,
            output_attentions=output_attentions,
            training=training,
        )
        visn_att_output = self.visual_attention(
            visn_attention_visn_input,
            visn_attention_lang_input,
            lang_attention_mask,
            output_attentions=output_attentions,
            training=training,
        )
        return lang_att_output, visn_att_output

    def self_att(
        self,
        lang_input,
        lang_attention_mask,
        visn_input,
        visn_attention_mask,
        training=False,
    ):
        # Self Attention
        output_attentions = False
        lang_att_output = self.lang_self_att(lang_input, lang_attention_mask, output_attentions, training=training)
        visn_att_output = self.visn_self_att(visn_input, visn_attention_mask, output_attentions, training=training)
        return lang_att_output[0], visn_att_output[0]

    def output_fc(self, lang_input, visn_input, training=False):
        # FC layers
        lang_inter_output = self.lang_inter(lang_input)
        visn_inter_output = self.visn_inter(visn_input)

        # Layer output
        lang_output = self.lang_output(lang_inter_output, lang_input, training)
        visn_output = self.visn_output(visn_inter_output, visn_input, training)
        return lang_output, visn_output

    def call(
        self,
        lang_feats,
        lang_attention_mask,
        visn_feats,
        visn_attention_mask,
        output_attentions,
        training=False,
    ):
        lang_att_output = lang_feats
        visn_att_output = visn_feats

        lang_att_output, visn_att_output = self.cross_att(
            lang_att_output,
            lang_attention_mask,
            visn_att_output,
            visn_attention_mask,
            output_attentions,
            training=training,
        )
        attention_probs = lang_att_output[1:]
        lang_att_output, visn_att_output = self.self_att(
            lang_att_output[0],
            lang_attention_mask,
            visn_att_output[0],
            visn_attention_mask,
            training=training,
        )
        lang_output, visn_output = self.output_fc(lang_att_output, visn_att_output, training=training)
        return (lang_output, visn_output, attention_probs[0]) if output_attentions else (lang_output, visn_output)


class TFLxmertEncoder(tf.keras.layers.Layer):
    """Full LXMERT encoder stack: language layers, relational (vision) layers,
    then cross-modality layers."""

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.visn_fc = TFLxmertVisualFeatureEncoder(config, name="visn_fc")

        # Number of layers
        self.num_l_layers = config.l_layers
        self.num_x_layers = config.x_layers
        self.num_r_layers = config.r_layers

        # Layers
        # Using self.layer instead of self.l_layer to support loading BERT weights.
        self.layer = [TFLxmertLayer(config, name=f"layer_._{i}") for i in range(self.num_l_layers)]
        self.x_layers = [TFLxmertXLayer(config, name=f"x_layers_._{i}") for i in range(self.num_x_layers)]
        self.r_layers = [TFLxmertLayer(config, name=f"r_layers_._{i}") for i in range(self.num_r_layers)]
        self.config = config

    def call(
        self,
        lang_feats=None,
        lang_attention_mask=None,
        visual_feats=None,
        visual_pos=None,
        visual_attention_mask=None,
        output_attentions=None,
        training=False,
    ):
        vision_hidden_states = ()
        language_hidden_states = ()
        # Attention tuples are only accumulated when attentions are requested (call arg or config).
        vision_attentions = () if output_attentions or self.config.output_attentions else None
        language_attentions = () if output_attentions or self.config.output_attentions else None
        cross_encoder_attentions = () if output_attentions or self.config.output_attentions else None

        visual_feats = self.visn_fc([visual_feats, visual_pos], training=training)

        # Run language layers
        for layer_module in self.layer:
            l_outputs = layer_module(lang_feats, lang_attention_mask, output_attentions, training=training)
            lang_feats = l_outputs[0]
            language_hidden_states = language_hidden_states + (lang_feats,)
            if language_attentions is not None:
                language_attentions = language_attentions + (l_outputs[1],)

        # Run relational layers
        for layer_module in self.r_layers:
            v_outputs = layer_module(
                visual_feats,
                visual_attention_mask,
                output_attentions,
                training=training,
            )
            visual_feats = v_outputs[0]
            vision_hidden_states = vision_hidden_states + (visual_feats,)
            if vision_attentions is not None:
                vision_attentions = vision_attentions + (v_outputs[1],)

        # Run cross-modality layers
        for layer_module in self.x_layers:
            x_outputs = layer_module(
                lang_feats,
                lang_attention_mask,
                visual_feats,
                visual_attention_mask,
                output_attentions,
                training=training,
            )
            lang_feats, visual_feats = x_outputs[:2]
            vision_hidden_states = vision_hidden_states + (visual_feats,)
            language_hidden_states = language_hidden_states + (lang_feats,)
            if cross_encoder_attentions is not None:
                cross_encoder_attentions = cross_encoder_attentions + (x_outputs[2],)

        visual_encoder_outputs = (
            vision_hidden_states,
            vision_attentions if output_attentions else None,
        )
        lang_encoder_outputs = (
            language_hidden_states,
            language_attentions if output_attentions else None,
        )
        return (
            visual_encoder_outputs,
            lang_encoder_outputs,
            cross_encoder_attentions if output_attentions else None,
        )


@keras_serializable
class TFLxmertMainLayer(tf.keras.layers.Layer):
    config_class = LxmertConfig

    @property
    def dummy_inputs(self):
        """
        Dummy inputs to build the network.
        Returns:
            tf.Tensor with dummy inputs
        """
        batch_size = 2
        num_visual_features = 10
        input_ids = tf.constant([[3, 5, 6], [2, 3, 4]], dtype=tf.int32)
        visual_feats = tf.random.uniform((batch_size, num_visual_features, self.config.visual_feat_dim))
        visual_pos = tf.random.uniform((batch_size, num_visual_features, 4))

        return {
            "input_ids": input_ids,
            "visual_feats": visual_feats,
            "visual_pos": visual_pos,
        }

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.config = config
        self.num_l_layers = config.l_layers
        self.num_x_layers = config.x_layers
        self.num_r_layers = config.r_layers
        self.initializer_range = config.initializer_range
        self.output_attentions = config.output_attentions
        self.output_hidden_states = config.output_hidden_states
        self.return_dict = config.use_return_dict
        self.embeddings = TFLxmertEmbeddings(config, name="embeddings")
        self.encoder = TFLxmertEncoder(config, name="encoder")
        self.pooler = TFLxmertPooler(config, name="pooler")
        self.config = config

    def get_input_embeddings(self):
        return self.embeddings

    def set_input_embeddings(self, value):
        self.embeddings.weight = value
        self.embeddings.vocab_size = shape_list(value)[0]

    def _prune_heads(self, heads_to_prune):
        # Head pruning is not implemented for the TF LXMERT model.
        raise NotImplementedError

    @unpack_inputs
    def call(
        self,
        input_ids=None,
        visual_feats=None,
        visual_pos=None,
        attention_mask=None,
        visual_attention_mask=None,
        token_type_ids=None,
        inputs_embeds=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
    ):
        # Exactly one of input_ids / inputs_embeds must be provided.
        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = shape_list(input_ids)
        elif inputs_embeds is not None:
            input_shape = shape_list(inputs_embeds)[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")
        if visual_pos is None or visual_feats is None:
            raise ValueError("visual_feats and visual_pos cannot be `None` in LXMERT's `call` method.")

        if attention_mask is None:
            attention_mask = tf.fill(input_shape, 1)

        if token_type_ids is None:
            token_type_ids = tf.fill(input_shape, 0)

        # Positional Word Embeddings
        embedding_output = self.embeddings(input_ids, token_type_ids, inputs_embeds, training)

        # We create a 3D attention mask from a 2D tensor mask.
        # Sizes are [batch_size, 1, 1, to_seq_length]
        # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
        # this attention mask is more simple than the triangular masking of causal attention
        # used in OpenAI GPT, we just need to prepare the broadcast dimension here.
        extended_attention_mask = tf.reshape(attention_mask, (input_shape[0], 1, 1, input_shape[1]))

        # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
        # masked positions, this operation will create a tensor which is 0.0 for
        # positions we want to attend and -10000.0 for masked positions.
        # Since we are adding it to the raw scores before the softmax, this is
        # effectively the same as removing these entirely.
extended_attention_mask = tf.cast(extended_attention_mask, dtype=embedding_output.dtype) one_cst = tf.constant(1.0, dtype=embedding_output.dtype) ten_thousand_cst = tf.constant(-10000.0, dtype=embedding_output.dtype) extended_attention_mask = tf.multiply(tf.subtract(one_cst, extended_attention_mask), ten_thousand_cst) if visual_attention_mask is not None: extended_visual_attention_mask = tf.reshape(visual_attention_mask, (input_shape[0], 1, 1, input_shape[1])) extended_visual_attention_mask = tf.expand_dims(tf.expand_dims(visual_attention_mask, axis=1), axis=1) extended_visual_attention_mask = tf.cast(extended_visual_attention_mask, dtype=embedding_output.dtype) extended_visual_attention_mask = tf.multiply( tf.subtract(one_cst, extended_visual_attention_mask), ten_thousand_cst ) else: extended_visual_attention_mask = None # Run Lxmert encoder encoder_outputs = self.encoder( embedding_output, extended_attention_mask, visual_feats, visual_pos, extended_visual_attention_mask, output_attentions, training, ) visual_encoder_outputs, lang_encoder_outputs = encoder_outputs[:2] vision_hidden_states = visual_encoder_outputs[0] language_hidden_states = lang_encoder_outputs[0] all_attentions = () if output_attentions: language_attentions = lang_encoder_outputs[1] vision_attentions = visual_encoder_outputs[1] cross_encoder_attentions = encoder_outputs[2] all_attentions = ( language_attentions, vision_attentions, cross_encoder_attentions, ) hidden_states = (language_hidden_states, vision_hidden_states) if output_hidden_states else () visual_output = vision_hidden_states[-1] lang_output = language_hidden_states[-1] pooled_output = self.pooler(lang_output) if not return_dict: return (lang_output, visual_output, pooled_output) + hidden_states + all_attentions return TFLxmertModelOutput( pooled_output=pooled_output, language_output=lang_output, vision_output=visual_output, language_hidden_states=language_hidden_states if output_hidden_states else None, 
            vision_hidden_states=vision_hidden_states if output_hidden_states else None,
            language_attentions=language_attentions if output_attentions else None,
            vision_attentions=vision_attentions if output_attentions else None,
            cross_encoder_attentions=cross_encoder_attentions if output_attentions else None,
        )


class TFLxmertPreTrainedModel(TFPreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = LxmertConfig
    base_model_prefix = "lxmert"

    @property
    def dummy_inputs(self) -> Dict[str, tf.Tensor]:
        # Delegates to the backbone layer's dummy_inputs (see TFLxmertMainLayer.dummy_inputs).
        return getattr(self, self.base_model_prefix).dummy_inputs

    @tf.function(
        input_signature=[
            {
                "input_ids": tf.TensorSpec((None, None), tf.int32, name="input_ids"),
                "attention_mask": tf.TensorSpec((None, None), tf.int32, name="attention_mask"),
                "visual_feats": tf.TensorSpec((None, None, None), tf.float32, name="visual_feats"),
                "visual_pos": tf.TensorSpec((None, None, None), tf.float32, name="visual_pos"),
                "visual_attention_mask": tf.TensorSpec((None, None), tf.int32, name="visual_attention_mask"),
                "token_type_ids": tf.TensorSpec((None, None), tf.int32, name="token_type_ids"),
            }
        ]
    )
    def serving(self, inputs):
        # SavedModel serving entry point: run the model, then normalize outputs.
        output = self.call(inputs)

        return self.serving_output(output)


LXMERT_START_DOCSTRING = r"""

    The LXMERT model was proposed in [LXMERT: Learning Cross-Modality Encoder Representations from
    Transformers](https://arxiv.org/abs/1908.07490) by Hao Tan and Mohit Bansal. It's a vision and language transformer
    model, pre-trained on a variety of multi-modal datasets comprising of GQA, VQAv2.0, MCSCOCO captions, and Visual
    genome, using a combination of masked language modeling, region of interest feature regression, cross entropy loss
    for question answering attribute prediction, and object tag prediction.

    This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass.
Use it as a regular TF 2.0 Keras Model and refer to the TF 2.0 documentation for all matter related to general usage and behavior. <Tip> TensorFlow models and layers in `transformers` accept two formats as input: - having all inputs as keyword arguments (like PyTorch models), or - having all inputs as a list, tuple or dict in the first positional argument. The reason the second format is supported is that Keras methods prefer this format when passing inputs to models and layers. Because of this support, when using methods like `model.fit()` things should "just work" for you - just pass your inputs and labels in any format that `model.fit()` supports! If, however, you want to use the second format outside of Keras methods like `fit()` and `predict()`, such as when creating your own layers or models with the Keras `Functional` API, there are three possibilities you can use to gather all the input Tensors in the first positional argument: - a single Tensor with `input_ids` only and nothing else: `model(input_ids)` - a list of varying length with one or several input Tensors IN THE ORDER given in the docstring: `model([input_ids, attention_mask])` or `model([input_ids, attention_mask, token_type_ids])` - a dictionary with one or several input Tensors associated to the input names given in the docstring: `model({"input_ids": input_ids, "token_type_ids": token_type_ids})` Note that when creating models and layers with [subclassing](https://keras.io/guides/making_new_layers_and_models_via_subclassing/) then you don't need to worry about any of this, as you can just pass inputs like you would to any other Python function! </Tip> Parameters: config ([`LxmertConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. 
""" LXMERT_INPUTS_DOCSTRING = r""" Args: input_ids (`np.ndarray` or `tf.Tensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.__call__`] and [`PreTrainedTokenizer.encode`] for details. [What are input IDs?](../glossary#input-ids) visual_feats: (`tf.Tensor` of shape `(batch_size, num_visual_features, visual_feat_dim)`): This input represents visual features. They ROI pooled object features from bounding boxes using a faster-RCNN model) These are currently not provided by the transformers library. visual_pos: (`tf.Tensor` of shape `(batch_size, num_visual_features, visual_feat_dim)`): This input represents spacial features corresponding to their relative (via index) visual features. The pre-trained LXMERT model expects these spacial features to be normalized bounding boxes on a scale of 0 to 1. These are currently not provided by the transformers library. attention_mask (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) visual_attention_mask (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*): MMask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0, 1]`: - 0 corresponds to a *sentence A* token, - 1 corresponds to a *sentence B* token. 
[What are token type IDs?](../glossary#token-type-ids) inputs_embeds (`tf.Tensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the config will be used instead. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the config will be used instead. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. This argument can be used in eager mode, in graph mode the value will always be set to True. training (`bool`, *optional*, defaults to `False`): Whether or not to use the model in training mode (some modules like dropout modules have different behaviors between training and evaluation). 
""" @add_start_docstrings( "The bare Lxmert Model transformer outputting raw hidden-states without any specific head on top.", LXMERT_START_DOCSTRING, ) class TFLxmertModel(TFLxmertPreTrainedModel): def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.lxmert = TFLxmertMainLayer(config, name="lxmert") @unpack_inputs @add_start_docstrings_to_model_forward(LXMERT_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TFLxmertModelOutput, config_class=_CONFIG_FOR_DOC, ) def call( self, input_ids: Optional[TFModelInputType] = None, visual_feats: Optional[tf.Tensor] = None, visual_pos: Optional[tf.Tensor] = None, attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, visual_attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, training: bool = False, ) -> Union[Tuple, TFLxmertModelOutput]: outputs = self.lxmert( input_ids, visual_feats, visual_pos, attention_mask, visual_attention_mask, token_type_ids, inputs_embeds, output_attentions, output_hidden_states, return_dict, training, ) return outputs def serving_output(self, output): l_hs = tf.convert_to_tensor(output.language_hidden_states) if self.config.output_hidden_states else None v_hs = tf.convert_to_tensor(output.vision_hidden_states) if self.config.output_hidden_states else None l_attns = tf.convert_to_tensor(output.language_attentions) if self.config.output_attentions else None v_attns = tf.convert_to_tensor(output.vision_attentions) if self.config.output_attentions else None c_enc_attns = tf.convert_to_tensor(output.cross_encoder_attentions) if self.config.output_attentions else None return TFLxmertModelOutput( pooled_output=output.pooled_output, 
language_output=output.language_output, vision_output=output.vision_output, language_hidden_states=l_hs, vision_hidden_states=v_hs, language_attentions=l_attns, vision_attentions=v_attns, cross_encoder_attentions=c_enc_attns, ) class TFLxmertPooler(tf.keras.layers.Layer): def __init__(self, config, **kwargs): super().__init__(**kwargs) self.dense = tf.keras.layers.Dense( config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), activation="tanh", name="dense", ) def call(self, hidden_states): # We "pool" the model by simply taking the hidden state corresponding # to the first token. first_token_tensor = hidden_states[:, 0] pooled_output = self.dense(first_token_tensor) return pooled_output # Copied from transformers.models.bert.modeling_tf_bert.TFBertPredictionHeadTransform with Bert->Lxmert class TFLxmertPredictionHeadTransform(tf.keras.layers.Layer): def __init__(self, config: LxmertConfig, **kwargs): super().__init__(**kwargs) self.dense = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), name="dense", ) if isinstance(config.hidden_act, str): self.transform_act_fn = get_tf_activation(config.hidden_act) else: self.transform_act_fn = config.hidden_act self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm") def call(self, hidden_states: tf.Tensor) -> tf.Tensor: hidden_states = self.dense(inputs=hidden_states) hidden_states = self.transform_act_fn(hidden_states) hidden_states = self.LayerNorm(inputs=hidden_states) return hidden_states # Copied from transformers.models.bert.modeling_tf_bert.TFBertLMPredictionHead with Bert->Lxmert class TFLxmertLMPredictionHead(tf.keras.layers.Layer): def __init__(self, config: LxmertConfig, input_embeddings: tf.keras.layers.Layer, **kwargs): super().__init__(**kwargs) self.config = config self.hidden_size = config.hidden_size self.transform = TFLxmertPredictionHeadTransform(config, name="transform") # The 
output weights are the same as the input embeddings, but there is
        # an output-only bias for each token.
        self.input_embeddings = input_embeddings

    def build(self, input_shape: tf.TensorShape):
        # Per-token output bias; the decoder matrix itself is tied to the input embeddings.
        self.bias = self.add_weight(shape=(self.config.vocab_size,), initializer="zeros", trainable=True, name="bias")

        super().build(input_shape)

    def get_output_embeddings(self) -> tf.keras.layers.Layer:
        return self.input_embeddings

    def set_output_embeddings(self, value: tf.Variable):
        self.input_embeddings.weight = value
        self.input_embeddings.vocab_size = shape_list(value)[0]

    def get_bias(self) -> Dict[str, tf.Variable]:
        return {"bias": self.bias}

    def set_bias(self, value: tf.Variable):
        self.bias = value["bias"]
        self.config.vocab_size = shape_list(value["bias"])[0]

    def call(self, hidden_states: tf.Tensor) -> tf.Tensor:
        hidden_states = self.transform(hidden_states=hidden_states)
        seq_length = shape_list(hidden_states)[1]
        # Project onto the (tied) embedding matrix to get vocabulary logits.
        hidden_states = tf.reshape(tensor=hidden_states, shape=[-1, self.hidden_size])
        hidden_states = tf.matmul(a=hidden_states, b=self.input_embeddings.weight, transpose_b=True)
        hidden_states = tf.reshape(tensor=hidden_states, shape=[-1, seq_length, self.config.vocab_size])
        hidden_states = tf.nn.bias_add(value=hidden_states, bias=self.bias)

        return hidden_states


# Copied from transformers.models.bert.modeling_tf_bert.TFBertMLMHead with Bert->Lxmert
class TFLxmertMLMHead(tf.keras.layers.Layer):
    """Masked-LM head: wraps the LM prediction head."""

    def __init__(self, config: LxmertConfig, input_embeddings: tf.keras.layers.Layer, **kwargs):
        super().__init__(**kwargs)

        self.predictions = TFLxmertLMPredictionHead(config, input_embeddings, name="predictions")

    def call(self, sequence_output: tf.Tensor) -> tf.Tensor:
        prediction_scores = self.predictions(hidden_states=sequence_output)

        return prediction_scores


class TFLxmertPreTrainingHeads(tf.keras.layers.Layer):
    """Pre-training heads: masked-LM scores plus the binary cross-modality matching score."""

    def __init__(self, config, input_embeddings, **kwargs):
        super().__init__(**kwargs)
        self.predictions = TFLxmertLMPredictionHead(config, input_embeddings, name="predictions")

        self.seq_relationship = tf.keras.layers.Dense(
            2,
            kernel_initializer=get_initializer(config.initializer_range),
            name="seq_relationship",
        )

    def call(self, sequence_output, pooled_output):
        prediction_scores = self.predictions(sequence_output)
        seq_relationship_score = self.seq_relationship(pooled_output)

        return prediction_scores, seq_relationship_score


class TFLxmertVisualAnswerHead(tf.keras.layers.Layer):
    """Question-answering head: 2-layer MLP (gelu + LayerNorm) over the pooled output."""

    def __init__(self, config, num_labels, **kwargs):
        super().__init__(**kwargs)
        hid_dim = config.hidden_size
        # Layer names mirror the original checkpoint's sequential "logit_fc" indices.
        self.dense = tf.keras.layers.Dense(
            hid_dim * 2,
            kernel_initializer=get_initializer(config.initializer_range),
            name="logit_fc_._0",
        )
        self.activation = get_tf_activation("gelu")
        self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="logit_fc_._2")
        self.dense_1 = tf.keras.layers.Dense(
            num_labels,
            kernel_initializer=get_initializer(config.initializer_range),
            name="logit_fc_._3",
        )

    def call(self, hidden_states):
        hidden_states = self.dense(hidden_states)
        hidden_states = self.activation(hidden_states)
        hidden_states = self.layer_norm(hidden_states)
        hidden_states = self.dense_1(hidden_states)

        return hidden_states


class TFLxmertVisualObjHead(tf.keras.layers.Layer):
    """Visual pre-training head: predicts object labels, attributes and/or features,
    depending on the configured visual losses."""

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.transform = TFLxmertPredictionHeadTransform(config, name="transform")

        # Decide the use of visual losses
        visual_losses = {}
        if config.visual_obj_loss:
            visual_losses["obj"] = {"shape": (-1,), "num": config.num_object_labels}
        if config.visual_attr_loss:
            visual_losses["attr"] = {"shape": (-1,), "num": config.num_attr_labels}
        if config.visual_feat_loss:
            visual_losses["feat"] = {"shape": (-1, 2048), "num": config.visual_feat_dim}
        self.visual_losses = visual_losses

        # The output weights are the same as the input embeddings, but there is
        # an output-only bias for each token.
        self.decoder_dict = {
            key: tf.keras.layers.Dense(
                self.visual_losses[key]["num"],
                kernel_initializer=get_initializer(config.initializer_range),
                name=f"decoder_dict.{key}",
            )
            for key in self.visual_losses
        }

    def call(self, hidden_states):
        hidden_states = self.transform(hidden_states)
        output = {}
        for key in self.visual_losses:
            output[key] = self.decoder_dict[key](hidden_states)
        return output


@add_start_docstrings("""Lxmert Model with a `language modeling` head on top.""", LXMERT_START_DOCSTRING)
class TFLxmertForPreTraining(TFLxmertPreTrainedModel):
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)

        self.config = config
        self.num_qa_labels = config.num_qa_labels
        self.visual_loss_normalizer = config.visual_loss_normalizer

        # Use of pretraining tasks
        self.task_mask_lm = config.task_mask_lm
        self.task_obj_predict = config.task_obj_predict
        self.task_matched = config.task_matched
        self.task_qa = config.task_qa

        # Lxmert backbone
        self.lxmert = TFLxmertMainLayer(config, name="lxmert")

        # Pre-training heads
        self.cls = TFLxmertPreTrainingHeads(config, self.lxmert.embeddings, name="cls")
        if self.task_obj_predict:
            self.obj_predict_head = TFLxmertVisualObjHead(config, name="obj_predict_head")
        if self.task_qa:
            self.answer_head = TFLxmertVisualAnswerHead(config, self.num_qa_labels, name="answer_head")

        # Loss functions
        self.loss_fcts = {
            "l2": tf.keras.losses.Huber(delta=1.0, name="huber_loss"),
            "visn_ce": tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
            "ce": tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
        }

        visual_losses = {}
        if config.visual_obj_loss:
            visual_losses["obj"] = {
                "shape": (-1,),
                "num": config.num_object_labels,
                "loss": "visn_ce",
            }
        if config.visual_attr_loss:
            visual_losses["attr"] = {
                "shape": (-1,),
                "num": config.num_attr_labels,
                "loss": "visn_ce",
            }
        if config.visual_feat_loss:
            visual_losses["feat"] = {
                "shape": (-1, config.visual_feat_dim),
                "num": config.visual_feat_dim,
                "loss": "l2",
            }
self.visual_losses = visual_losses @property def dummy_inputs(self): """ Dummy inputs to build the network. Returns: tf.Tensor with dummy inputs """ batch_size = 2 num_visual_features = 10 input_ids = tf.constant([[3, 5, 6], [2, 3, 4]], dtype=tf.int32) visual_feats = tf.random.uniform((batch_size, num_visual_features, self.config.visual_feat_dim)) visual_pos = tf.random.uniform((batch_size, num_visual_features, 4)) if self.config.task_obj_predict: obj_labels = {} if self.config.visual_attr_loss and self.config.task_obj_predict: obj_labels["attr"] = ( tf.ones([batch_size, num_visual_features]), tf.ones([batch_size, num_visual_features]), ) if self.config.visual_feat_loss and self.config.task_obj_predict: obj_labels["feat"] = ( tf.ones([batch_size, num_visual_features, self.config.visual_feat_dim]), tf.ones([batch_size, num_visual_features]), ) if self.config.visual_obj_loss and self.config.task_obj_predict: obj_labels["obj"] = ( tf.ones([batch_size, num_visual_features]), tf.ones([batch_size, num_visual_features]), ) return { **{ "input_ids": input_ids, "visual_feats": visual_feats, "visual_pos": visual_pos, }, **({"obj_labels": obj_labels} if self.config.task_obj_predict else {}), } def get_lm_head(self): return self.cls.predictions def get_prefix_bias_name(self): warnings.warn("The method get_prefix_bias_name is deprecated. 
Please use `get_bias` instead.", FutureWarning) return self.name + "/" + self.cls.name + "/" + self.cls.predictions.name @unpack_inputs @add_start_docstrings_to_model_forward(LXMERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFLxmertForPreTrainingOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_ids=None, visual_feats=None, visual_pos=None, attention_mask=None, visual_attention_mask=None, token_type_ids=None, inputs_embeds=None, masked_lm_labels=None, obj_labels=None, matched_label=None, ans=None, output_attentions=None, output_hidden_states=None, return_dict=None, training=False, ): r""" masked_lm_labels (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. Indices should be in `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]` obj_labels: (`Dict[Str: Tuple[tf.Tensor, tf.Tensor]]`, *optional*, defaults to `None`): each key is named after each one of the visual losses and each element of the tuple is of the shape `(batch_size, num_features)` and `(batch_size, num_features, visual_feature_dim)` for each the label id and the label score respectively matched_label (`tf.Tensor` of shape `(batch_size,)`, *optional*): Labels for computing the whether or not the text input matches the image (classification) loss. Input should be a sequence pair (see `input_ids` docstring) Indices should be in `[0, 1]`: - 0 indicates that the sentence does not match the image, - 1 indicates that the sentence does match the image. 
ans (`Torch.Tensor` of shape `(batch_size)`, *optional*, defaults to `None`): a one hot representation hof the correct answer *optional* Returns: """ lxmert_output = self.lxmert( input_ids, visual_feats, visual_pos, attention_mask, visual_attention_mask, token_type_ids, inputs_embeds, output_attentions, output_hidden_states, return_dict, training, ) lang_output, visual_output, pooled_output = ( lxmert_output[0], lxmert_output[1], lxmert_output[2], ) lang_prediction_scores, cross_relationship_score = self.cls(lang_output, pooled_output) if self.task_qa: answer_score = self.answer_head(pooled_output) else: answer_score = pooled_output[0][0] total_loss = ( None if (masked_lm_labels is None and matched_label is None and obj_labels is None and ans is None) else tf.constant(0.0) ) losses = () if masked_lm_labels is not None and self.task_mask_lm: masked_lm_loss = self.loss_fcts["ce"]( tf.reshape(masked_lm_labels, [-1]), tf.reshape(lang_prediction_scores, [-1, self.config.vocab_size]), ) total_loss += masked_lm_loss losses += (masked_lm_loss,) if matched_label is not None and self.task_matched: matched_loss = self.loss_fcts["ce"]( tf.reshape(matched_label, [-1]), tf.reshape(cross_relationship_score, [-1, 2]), ) total_loss += matched_loss losses += (matched_loss,) if obj_labels is not None and self.task_obj_predict: total_visn_loss = 0.0 visn_prediction_scores_dict = self.obj_predict_head(visual_output) for key, key_info in self.visual_losses.items(): label, mask_conf = obj_labels[key] output_dim = key_info["num"] loss_fct_name = key_info["loss"] label_shape = key_info["shape"] weight = self.visual_loss_normalizer visn_loss_fct = self.loss_fcts[loss_fct_name] visn_prediction_scores = visn_prediction_scores_dict[key] visn_loss = visn_loss_fct( tf.reshape(label, label_shape), tf.reshape(visn_prediction_scores, [-1, output_dim]), ) if visn_loss.ndim > 1: # Regression Losses visn_loss = tf.reduce_mean(visn_loss) visn_loss = tf.reduce_mean(visn_loss * 
tf.cast(tf.reshape(mask_conf, [-1]), visn_loss.dtype)) * weight total_visn_loss += visn_loss losses += (visn_loss,) total_loss += total_visn_loss if ans is not None and self.task_qa: answer_loss = self.loss_fcts["ce"]( tf.reshape(ans, [-1]), tf.reshape(answer_score, [-1, self.num_qa_labels]) ) # exclude "*2" here to match the effect of QA losses. # Previous: (loss *0) for 6 epochs, (loss *2) for 6 epochs. (Used 10 instead of 6 in EMNLP paper) # Now : (loss *1) for 12 epochs # # * 2 # Multiply by 2 because > half of the data will not have label total_loss += answer_loss losses += (answer_loss,) # return total_loss, tf.stack(losses)[tf.new_axis, ...], answer_score.detach() if not return_dict: output = ( lang_prediction_scores, cross_relationship_score, answer_score, ) + lxmert_output[3:] return ((total_loss,) + output) if total_loss is not None else output return TFLxmertForPreTrainingOutput( loss=total_loss, prediction_logits=lang_prediction_scores, cross_relationship_score=cross_relationship_score, question_answering_score=answer_score, language_hidden_states=lxmert_output.language_hidden_states, vision_hidden_states=lxmert_output.vision_hidden_states, language_attentions=lxmert_output.language_attentions, vision_attentions=lxmert_output.vision_attentions, cross_encoder_attentions=lxmert_output.cross_encoder_attentions, ) def serving_output(self, output): l_hs = tf.convert_to_tensor(output.language_hidden_states) if self.config.output_hidden_states else None v_hs = tf.convert_to_tensor(output.vision_hidden_states) if self.config.output_hidden_states else None l_attns = tf.convert_to_tensor(output.language_attentions) if self.config.output_attentions else None v_attns = tf.convert_to_tensor(output.vision_attentions) if self.config.output_attentions else None c_enc_attns = tf.convert_to_tensor(output.cross_encoder_attentions) if self.config.output_attentions else None return TFLxmertForPreTrainingOutput( prediction_logits=output.prediction_logits, 
cross_relationship_score=output.cross_relationship_score, question_answering_score=output.question_answering_score, language_hidden_states=l_hs, vision_hidden_states=v_hs, language_attentions=l_attns, vision_attentions=v_attns, cross_encoder_attentions=c_enc_attns, )
27182812/ChatGLM-LLaMA-chinese-insturct
3,396
src/transformers/models/lxmert/__init__.py
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


# Objects that are always importable, regardless of which backends are installed.
_import_structure = {
    "configuration_lxmert": ["LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "LxmertConfig"],
    "tokenization_lxmert": ["LxmertTokenizer"],
}

# Fast tokenizer is only exposed when the `tokenizers` backend is available.
try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_lxmert_fast"] = ["LxmertTokenizerFast"]

# PyTorch modeling classes are only exposed when torch is available.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_lxmert"] = [
        "LxmertEncoder",
        "LxmertForPreTraining",
        "LxmertForQuestionAnswering",
        "LxmertModel",
        "LxmertPreTrainedModel",
        "LxmertVisualFeatureEncoder",
        "LxmertXLayer",
    ]

# TensorFlow modeling classes are only exposed when TF is available.
try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_tf_lxmert"] = [
        "TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFLxmertForPreTraining",
        "TFLxmertMainLayer",
        "TFLxmertModel",
        "TFLxmertPreTrainedModel",
        "TFLxmertVisualFeatureEncoder",
    ]


if TYPE_CHECKING:
    # Static type checkers see the real imports; at runtime the lazy module below is used.
    from .configuration_lxmert import LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP, LxmertConfig
    from .tokenization_lxmert import LxmertTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_lxmert_fast import LxmertTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_lxmert import (
            LxmertEncoder,
            LxmertForPreTraining,
            LxmertForQuestionAnswering,
            LxmertModel,
            LxmertPreTrainedModel,
            LxmertVisualFeatureEncoder,
            LxmertXLayer,
        )

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_lxmert import (
            TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFLxmertForPreTraining,
            TFLxmertMainLayer,
            TFLxmertModel,
            TFLxmertPreTrainedModel,
            TFLxmertVisualFeatureEncoder,
        )

else:
    import sys

    # Replace this module with a lazy proxy so heavy backends load on first attribute access.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
9,510
src/transformers/models/lxmert/configuration_lxmert.py
# coding=utf-8
# Copyright 2018, Hao Tan, Mohit Bansal
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" LXMERT model configuration"""


from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "unc-nlp/lxmert-base-uncased": "https://huggingface.co/unc-nlp/lxmert-base-uncased/resolve/main/config.json",
}


class LxmertConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`LxmertModel`] or a [`TFLxmertModel`]. It is used
    to instantiate a LXMERT model according to the specified arguments, defining the model architecture. Instantiating
    a configuration with the defaults will yield a similar configuration to that of the Lxmert
    [unc-nlp/lxmert-base-uncased](https://huggingface.co/unc-nlp/lxmert-base-uncased) architecture.

    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.

    Args:
        vocab_size (`int`, *optional*, defaults to 30522):
            Vocabulary size of the LXMERT model. Defines the number of different tokens that can be represented by the
            `inputs_ids` passed when calling [`LxmertModel`] or [`TFLxmertModel`].
        hidden_size (`int`, *optional*, defaults to 768):
            Dimensionality of the encoder layers and the pooler layer.
        r_layers (`int`, *optional*, defaults to 5):
            Number of hidden layers in the Transformer visual encoder.
        l_layers (`int`, *optional*, defaults to 9):
            Number of hidden layers in the Transformer language encoder.
        x_layers (`int`, *optional*, defaults to 5):
            Number of hidden layers in the Transformer cross modality encoder.
        num_attention_heads (`int`, *optional*, defaults to 12):
            Number of attention heads for each attention layer in the Transformer encoder.
        intermediate_size (`int`, *optional*, defaults to 3072):
            Dimensionality of the "intermediate" (often named feed-forward) layer in the Transformer encoder.
        hidden_act (`str` or `Callable`, *optional*, defaults to `"gelu"`):
            The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`,
            `"relu"`, `"silu"` and `"gelu_new"` are supported.
        hidden_dropout_prob (`float`, *optional*, defaults to 0.1):
            The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
        attention_probs_dropout_prob (`float`, *optional*, defaults to 0.1):
            The dropout ratio for the attention probabilities.
        max_position_embeddings (`int`, *optional*, defaults to 512):
            The maximum sequence length that this model might ever be used with. Typically set this to something large
            just in case (e.g., 512 or 1024 or 2048).
        type_vocab_size (`int`, *optional*, defaults to 2):
            The vocabulary size of the *token_type_ids* passed into [`BertModel`].
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        layer_norm_eps (`float`, *optional*, defaults to 1e-12):
            The epsilon used by the layer normalization layers.
        visual_feat_dim (`int`, *optional*, defaults to 2048):
            This represents the last dimension of the pooled-object features used as input for the model, representing
            the size of each object feature itself.
        visual_pos_dim (`int`, *optional*, defaults to 4):
            This represents the number of spatial features that are mixed into the visual features. The default is set
            to 4 because most commonly this will represent the location of a bounding box. i.e., (x, y, width, height)
        visual_loss_normalizer (`float`, *optional*, defaults to 6.67):
            This represents the scaling factor in which each visual loss is multiplied by if during pretraining, one
            decided to train with multiple vision-based loss objectives.
        num_qa_labels (`int`, *optional*, defaults to 9500):
            This represents the total number of different question answering (QA) labels there are. If using more than
            one dataset with QA, the user will need to account for the total number of labels that all of the datasets
            have in total.
        num_object_labels (`int`, *optional*, defaults to 1600):
            This represents the total number of semantically unique objects that lxmert will be able to classify a
            pooled-object feature as belonging to.
        num_attr_labels (`int`, *optional*, defaults to 400):
            This represents the total number of semantically unique attributes that lxmert will be able to classify a
            pooled-object feature as possessing.
        task_matched (`bool`, *optional*, defaults to `True`):
            This task is used for sentence-image matching. If the sentence correctly describes the image the label will
            be 1. If the sentence does not correctly describe the image, the label will be 0.
        task_mask_lm (`bool`, *optional*, defaults to `True`):
            Whether or not to add masked language modeling (as used in pretraining models such as BERT) to the loss
            objective.
        task_obj_predict (`bool`, *optional*, defaults to `True`):
            Whether or not to add object prediction, attribute prediction and feature regression to the loss objective.
        task_qa (`bool`, *optional*, defaults to `True`):
            Whether or not to add the question-answering loss to the objective
        visual_obj_loss (`bool`, *optional*, defaults to `True`):
            Whether or not to calculate the object-prediction loss objective
        visual_attr_loss (`bool`, *optional*, defaults to `True`):
            Whether or not to calculate the attribute-prediction loss objective
        visual_feat_loss (`bool`, *optional*, defaults to `True`):
            Whether or not to calculate the feature-regression loss objective
        output_attentions (`bool`, *optional*, defaults to `False`):
            Whether or not the model should return the attentions from the vision, language, and cross-modality layers
            should be returned.
        output_hidden_states (`bool`, *optional*, defaults to `False`):
            Whether or not the model should return the hidden states from the vision, language, and cross-modality
            layers should be returned.
    """

    model_type = "lxmert"
    attribute_map = {}

    def __init__(
        self,
        vocab_size=30522,
        hidden_size=768,
        num_attention_heads=12,
        num_qa_labels=9500,
        num_object_labels=1600,
        num_attr_labels=400,
        intermediate_size=3072,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        max_position_embeddings=512,
        type_vocab_size=2,
        initializer_range=0.02,
        layer_norm_eps=1e-12,
        l_layers=9,
        x_layers=5,
        r_layers=5,
        visual_feat_dim=2048,
        visual_pos_dim=4,
        visual_loss_normalizer=6.67,
        task_matched=True,
        task_mask_lm=True,
        task_obj_predict=True,
        task_qa=True,
        visual_obj_loss=True,
        visual_attr_loss=True,
        visual_feat_loss=True,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_attention_heads = num_attention_heads
        self.hidden_act = hidden_act
        self.intermediate_size = intermediate_size
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.max_position_embeddings = max_position_embeddings
        self.type_vocab_size = type_vocab_size
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
        self.num_qa_labels = num_qa_labels
        self.num_object_labels = num_object_labels
        self.num_attr_labels = num_attr_labels
        self.l_layers = l_layers
        self.x_layers = x_layers
        self.r_layers = r_layers
        self.visual_feat_dim = visual_feat_dim
        self.visual_pos_dim = visual_pos_dim
        self.visual_loss_normalizer = visual_loss_normalizer
        self.task_matched = task_matched
        self.task_mask_lm = task_mask_lm
        self.task_obj_predict = task_obj_predict
        self.task_qa = task_qa
        self.visual_obj_loss = visual_obj_loss
        self.visual_attr_loss = visual_attr_loss
        self.visual_feat_loss = visual_feat_loss
        # Expose the three per-encoder depths under the common `num_hidden_layers` name
        # so downstream code can query layer counts per modality.
        self.num_hidden_layers = {"vision": r_layers, "cross_encoder": x_layers, "language": l_layers}
        super().__init__(**kwargs)
27182812/ChatGLM-LLaMA-chinese-insturct
64,975
src/transformers/models/lxmert/modeling_lxmert.py
# coding=utf-8 # Copyright 2018 Hao Tan, Mohit Bansal, and the HuggingFace team # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch LXMERT model.""" import math import os import warnings from dataclasses import dataclass from typing import Dict, Optional, Tuple, Union import torch from torch import nn from torch.nn import CrossEntropyLoss, SmoothL1Loss from ...activations import ACT2FN, gelu from ...modeling_utils import PreTrainedModel from ...utils import ( ModelOutput, add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_lxmert import LxmertConfig logger = logging.get_logger(__name__) _CHECKPOINT_FOR_DOC = "unc-nlp/lxmert-base-uncased" _CONFIG_FOR_DOC = "LxmertConfig" LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST = [ "unc-nlp/lxmert-base-uncased", ] class GeLU(nn.Module): def __init__(self): super().__init__() def forward(self, x): return gelu(x) @dataclass class LxmertModelOutput(ModelOutput): """ Lxmert's outputs that contain the last hidden states, pooled outputs, and attention probabilities for the language, visual, and, cross-modality encoders. (note: the visual encoder in Lxmert is referred to as the "relation-ship" encoder") Args: language_output (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`): Sequence of hidden-states at the output of the last layer of the language encoder. 
vision_output (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`): Sequence of hidden-states at the output of the last layer of the visual encoder. pooled_output (`torch.FloatTensor` of shape `(batch_size, hidden_size)`): Last layer hidden-state of the first token of the sequence (classification, CLS, token) further processed by a Linear layer and a Tanh activation function. The Linear language_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for input features + one for the output of each cross-modality layer) of shape `(batch_size, sequence_length, hidden_size)`. vision_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for input features + one for the output of each cross-modality layer) of shape `(batch_size, sequence_length, hidden_size)`. language_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. vision_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. 
cross_encoder_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ language_output: Optional[torch.FloatTensor] = None vision_output: Optional[torch.FloatTensor] = None pooled_output: Optional[torch.FloatTensor] = None language_hidden_states: Optional[Tuple[torch.FloatTensor]] = None vision_hidden_states: Optional[Tuple[torch.FloatTensor]] = None language_attentions: Optional[Tuple[torch.FloatTensor]] = None vision_attentions: Optional[Tuple[torch.FloatTensor]] = None cross_encoder_attentions: Optional[Tuple[torch.FloatTensor]] = None @dataclass class LxmertForQuestionAnsweringOutput(ModelOutput): """ Output type of [`LxmertForQuestionAnswering`]. Args: loss (*optional*, returned when `labels` is provided, `torch.FloatTensor` of shape `(1,)`): Total loss as the sum of the masked language modeling loss and the next sequence prediction (classification) loss.k. question_answering_score: (`torch.FloatTensor` of shape `(batch_size, n_qa_answers)`, *optional*): Prediction scores of question answering objective (classification). language_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for input features + one for the output of each cross-modality layer) of shape `(batch_size, sequence_length, hidden_size)`. 
vision_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for input features + one for the output of each cross-modality layer) of shape `(batch_size, sequence_length, hidden_size)`. language_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. vision_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. cross_encoder_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. 
""" loss: Optional[torch.FloatTensor] = None question_answering_score: Optional[torch.FloatTensor] = None language_hidden_states: Optional[Tuple[torch.FloatTensor]] = None vision_hidden_states: Optional[Tuple[torch.FloatTensor]] = None language_attentions: Optional[Tuple[torch.FloatTensor]] = None vision_attentions: Optional[Tuple[torch.FloatTensor]] = None cross_encoder_attentions: Optional[Tuple[torch.FloatTensor]] = None @dataclass class LxmertForPreTrainingOutput(ModelOutput): """ Output type of [`LxmertForPreTraining`]. Args: loss (*optional*, returned when `labels` is provided, `torch.FloatTensor` of shape `(1,)`): Total loss as the sum of the masked language modeling loss and the next sequence prediction (classification) loss. prediction_logits (`torch.FloatTensor` of shape `(batch_size, sequence_length, config.vocab_size)`): Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax). cross_relationship_score: (`torch.FloatTensor` of shape `(batch_size, 2)`): Prediction scores of the textual matching objective (classification) head (scores of True/False continuation before SoftMax). question_answering_score: (`torch.FloatTensor` of shape `(batch_size, n_qa_answers)`): Prediction scores of question answering objective (classification). language_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for input features + one for the output of each cross-modality layer) of shape `(batch_size, sequence_length, hidden_size)`. vision_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for input features + one for the output of each cross-modality layer) of shape `(batch_size, sequence_length, hidden_size)`. 
language_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. vision_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. cross_encoder_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. """ loss: Optional[torch.FloatTensor] = None prediction_logits: Optional[torch.FloatTensor] = None cross_relationship_score: Optional[torch.FloatTensor] = None question_answering_score: Optional[torch.FloatTensor] = None language_hidden_states: Optional[Tuple[torch.FloatTensor]] = None vision_hidden_states: Optional[Tuple[torch.FloatTensor]] = None language_attentions: Optional[Tuple[torch.FloatTensor]] = None vision_attentions: Optional[Tuple[torch.FloatTensor]] = None cross_encoder_attentions: Optional[Tuple[torch.FloatTensor]] = None def load_tf_weights_in_lxmert(model, config, tf_checkpoint_path): """Load tf checkpoints in a pytorch model.""" try: import re import numpy as np import tensorflow as tf except ImportError: logger.error( "Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. 
class LxmertEmbeddings(nn.Module):
    """Construct the embeddings from word, position and token_type embeddings."""

    def __init__(self, config):
        super().__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=0)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size, padding_idx=0)
        self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size, padding_idx=0)

        # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
        # any TensorFlow checkpoint file
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=1e-12)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, input_ids, token_type_ids=None, inputs_embeds=None):
        """Sum word, position, and token-type embeddings, then LayerNorm + dropout.

        Exactly one of `input_ids` / `inputs_embeds` must be provided.
        """
        if input_ids is not None:
            input_shape = input_ids.size()
            device = input_ids.device
        else:
            input_shape = inputs_embeds.size()[:-1]
            device = inputs_embeds.device
        seq_length = input_shape[1]

        position_ids = torch.arange(seq_length, dtype=torch.long, device=device)
        position_ids = position_ids.unsqueeze(0).expand(input_shape)

        if token_type_ids is None:
            # BUGFIX: this module registers no `position_ids` buffer, so the previous
            # `device=self.position_ids.device` raised AttributeError whenever
            # token_type_ids was omitted. Use the device derived above instead.
            token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)

        if inputs_embeds is None:
            inputs_embeds = self.word_embeddings(input_ids)

        position_embeddings = self.position_embeddings(position_ids)
        token_type_embeddings = self.token_type_embeddings(token_type_ids)

        embeddings = inputs_embeds + position_embeddings + token_type_embeddings
        embeddings = self.LayerNorm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings


class LxmertAttention(nn.Module):
    """Multi-head attention over a `context` keyed/valued sequence (self- or cross-attention)."""

    def __init__(self, config, ctx_dim=None):
        super().__init__()
        if config.hidden_size % config.num_attention_heads != 0:
            raise ValueError(
                f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention "
                f"heads ({config.num_attention_heads})"
            )
        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.head_size = self.num_attention_heads * self.attention_head_size

        # visual_dim = 2048
        if ctx_dim is None:
            ctx_dim = config.hidden_size
        self.query = nn.Linear(config.hidden_size, self.head_size)
        self.key = nn.Linear(ctx_dim, self.head_size)
        self.value = nn.Linear(ctx_dim, self.head_size)

        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)

    def transpose_for_scores(self, x):
        # (batch, seq, all_heads) -> (batch, heads, seq, head_size)
        new_x_shape = x.size()[:-1] + (
            self.num_attention_heads,
            self.attention_head_size,
        )
        x = x.view(new_x_shape)
        return x.permute(0, 2, 1, 3)

    def forward(self, hidden_states, context, attention_mask=None, output_attentions=False):
        mixed_query_layer = self.query(hidden_states)
        mixed_key_layer = self.key(context)
        mixed_value_layer = self.value(context)

        query_layer = self.transpose_for_scores(mixed_query_layer)
        key_layer = self.transpose_for_scores(mixed_key_layer)
        value_layer = self.transpose_for_scores(mixed_value_layer)

        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
        attention_scores = attention_scores / math.sqrt(self.attention_head_size)
        # Apply the attention mask (precomputed for all layers in the model's forward() function).
        if attention_mask is not None:
            attention_scores = attention_scores + attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = nn.functional.softmax(attention_scores, dim=-1)

        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs)

        context_layer = torch.matmul(attention_probs, value_layer)
        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
        new_context_layer_shape = context_layer.size()[:-2] + (self.head_size,)
        context_layer = context_layer.view(new_context_layer_shape)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)
        return outputs
class LxmertAttentionOutput(nn.Module):
    """Projection + dropout + residual LayerNorm applied after an attention block."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=1e-12)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        projected = self.dense(hidden_states)
        projected = self.dropout(projected)
        # Residual connection followed by LayerNorm.
        return self.LayerNorm(projected + input_tensor)


class LxmertCrossAttentionLayer(nn.Module):
    """Cross-modality attention: queries from `input_tensor`, keys/values from `ctx_tensor`."""

    def __init__(self, config):
        super().__init__()
        self.att = LxmertAttention(config)
        self.output = LxmertAttentionOutput(config)

    def forward(self, input_tensor, ctx_tensor, ctx_att_mask=None, output_attentions=False):
        output = self.att(input_tensor, ctx_tensor, ctx_att_mask, output_attentions=output_attentions)
        if output_attentions:
            attention_probs = output[1]
        attention_output = self.output(output[0], input_tensor)
        if output_attentions:
            return (attention_output, attention_probs)
        return (attention_output,)


class LxmertSelfAttentionLayer(nn.Module):
    """Standard self-attention layer: the sequence attends to itself."""

    def __init__(self, config):
        super().__init__()
        self.self = LxmertAttention(config)
        self.output = LxmertAttentionOutput(config)

    def forward(self, input_tensor, attention_mask, output_attentions=False):
        # Self attention attends to itself, thus keys and queries are the same (input_tensor).
        output = self.self(
            input_tensor,
            input_tensor,
            attention_mask,
            output_attentions=output_attentions,
        )
        if output_attentions:
            attention_probs = output[1]
        attention_output = self.output(output[0], input_tensor)
        if output_attentions:
            return (attention_output, attention_probs)
        return (attention_output,)
class LxmertIntermediate(nn.Module):
    """Feed-forward expansion: hidden_size -> intermediate_size with activation."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        self.intermediate_act_fn = ACT2FN[config.hidden_act]

    def forward(self, hidden_states):
        return self.intermediate_act_fn(self.dense(hidden_states))


class LxmertOutput(nn.Module):
    """Feed-forward contraction: intermediate_size -> hidden_size, residual + LayerNorm."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=1e-12)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        projected = self.dense(hidden_states)
        projected = self.dropout(projected)
        return self.LayerNorm(projected + input_tensor)


class LxmertLayer(nn.Module):
    """One full transformer layer: self-attention followed by a feed-forward block."""

    def __init__(self, config):
        super().__init__()
        self.attention = LxmertSelfAttentionLayer(config)
        self.intermediate = LxmertIntermediate(config)
        self.output = LxmertOutput(config)

    def forward(self, hidden_states, attention_mask=None, output_attentions=False):
        outputs = self.attention(hidden_states, attention_mask, output_attentions=output_attentions)
        attention_output = outputs[0]
        intermediate_output = self.intermediate(attention_output)
        layer_output = self.output(intermediate_output, attention_output)
        # Append attentions if we output them.
        return (layer_output,) + outputs[1:]


class LxmertXLayer(nn.Module):
    """Cross-modality layer: cross-attention, per-modality self-attention, then FFNs."""

    def __init__(self, config):
        super().__init__()
        # The cross-attention Layer
        self.visual_attention = LxmertCrossAttentionLayer(config)

        # Self-attention Layers
        self.lang_self_att = LxmertSelfAttentionLayer(config)
        self.visn_self_att = LxmertSelfAttentionLayer(config)

        # Intermediate and Output Layers (FFNs)
        self.lang_inter = LxmertIntermediate(config)
        self.lang_output = LxmertOutput(config)
        self.visn_inter = LxmertIntermediate(config)
        self.visn_output = LxmertOutput(config)

    def cross_att(
        self,
        lang_input,
        lang_attention_mask,
        visual_input,
        visual_attention_mask,
        output_x_attentions=False,
    ):
        # Cross attention: each modality queries the other.
        lang_att_output = self.visual_attention(
            lang_input,
            visual_input,
            ctx_att_mask=visual_attention_mask,
            output_attentions=output_x_attentions,
        )
        visual_att_output = self.visual_attention(
            visual_input,
            lang_input,
            ctx_att_mask=lang_attention_mask,
            output_attentions=False,
        )
        return lang_att_output, visual_att_output

    def self_att(self, lang_input, lang_attention_mask, visual_input, visual_attention_mask):
        # Per-modality self attention.
        lang_att_output = self.lang_self_att(lang_input, lang_attention_mask, output_attentions=False)
        visual_att_output = self.visn_self_att(visual_input, visual_attention_mask, output_attentions=False)
        return lang_att_output[0], visual_att_output[0]

    def output_fc(self, lang_input, visual_input):
        # Per-modality FFN blocks.
        lang_inter_output = self.lang_inter(lang_input)
        visual_inter_output = self.visn_inter(visual_input)

        lang_output = self.lang_output(lang_inter_output, lang_input)
        visual_output = self.visn_output(visual_inter_output, visual_input)
        return lang_output, visual_output

    def forward(
        self,
        lang_feats,
        lang_attention_mask,
        visual_feats,
        visual_attention_mask,
        output_attentions=False,
    ):
        lang_att_output, visual_att_output = self.cross_att(
            lang_input=lang_feats,
            lang_attention_mask=lang_attention_mask,
            visual_input=visual_feats,
            visual_attention_mask=visual_attention_mask,
            output_x_attentions=output_attentions,
        )
        attention_probs = lang_att_output[1:]
        lang_att_output, visual_att_output = self.self_att(
            lang_att_output[0],
            lang_attention_mask,
            visual_att_output[0],
            visual_attention_mask,
        )
        lang_output, visual_output = self.output_fc(lang_att_output, visual_att_output)
        if output_attentions:
            return (lang_output, visual_output, attention_probs[0])
        return (lang_output, visual_output)
class LxmertVisualFeatureEncoder(nn.Module):
    """Project ROI object features and box positions into the shared hidden space."""

    def __init__(self, config):
        super().__init__()
        feat_dim = config.visual_feat_dim
        pos_dim = config.visual_pos_dim

        # Object feature encoding
        self.visn_fc = nn.Linear(feat_dim, config.hidden_size)
        self.visn_layer_norm = nn.LayerNorm(config.hidden_size, eps=1e-12)

        # Box position encoding
        self.box_fc = nn.Linear(pos_dim, config.hidden_size)
        self.box_layer_norm = nn.LayerNorm(config.hidden_size, eps=1e-12)

        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, visual_feats, visual_pos):
        feats = self.visn_layer_norm(self.visn_fc(visual_feats))
        boxes = self.box_layer_norm(self.box_fc(visual_pos))
        # Average the two normalized encodings.
        return self.dropout((feats + boxes) / 2)


class LxmertEncoder(nn.Module):
    """Stack of language layers, visual ("relational") layers, and cross-modality layers."""

    def __init__(self, config):
        super().__init__()

        # Obj-level image embedding layer
        self.visn_fc = LxmertVisualFeatureEncoder(config)
        self.config = config

        # Number of layers
        self.num_l_layers = config.l_layers
        self.num_x_layers = config.x_layers
        self.num_r_layers = config.r_layers

        # Layers
        # Using self.layer instead of self.l_layer to support loading BERT weights.
        self.layer = nn.ModuleList([LxmertLayer(config) for _ in range(self.num_l_layers)])
        self.x_layers = nn.ModuleList([LxmertXLayer(config) for _ in range(self.num_x_layers)])
        self.r_layers = nn.ModuleList([LxmertLayer(config) for _ in range(self.num_r_layers)])

    def forward(
        self,
        lang_feats,
        lang_attention_mask,
        visual_feats,
        visual_pos,
        visual_attention_mask=None,
        output_attentions=None,
    ):
        vision_hidden_states = ()
        language_hidden_states = ()
        vision_attentions = () if output_attentions or self.config.output_attentions else None
        language_attentions = () if output_attentions or self.config.output_attentions else None
        cross_encoder_attentions = () if output_attentions or self.config.output_attentions else None

        visual_feats = self.visn_fc(visual_feats, visual_pos)

        # Run language layers
        for layer_module in self.layer:
            l_outputs = layer_module(lang_feats, lang_attention_mask, output_attentions=output_attentions)
            lang_feats = l_outputs[0]
            language_hidden_states = language_hidden_states + (lang_feats,)
            if language_attentions is not None:
                language_attentions = language_attentions + (l_outputs[1],)

        # Run relational layers
        for layer_module in self.r_layers:
            v_outputs = layer_module(visual_feats, visual_attention_mask, output_attentions=output_attentions)
            visual_feats = v_outputs[0]
            vision_hidden_states = vision_hidden_states + (visual_feats,)
            if vision_attentions is not None:
                vision_attentions = vision_attentions + (v_outputs[1],)

        # Run cross-modality layers
        for layer_module in self.x_layers:
            x_outputs = layer_module(
                lang_feats,
                lang_attention_mask,
                visual_feats,
                visual_attention_mask,
                output_attentions=output_attentions,
            )
            lang_feats, visual_feats = x_outputs[:2]
            vision_hidden_states = vision_hidden_states + (visual_feats,)
            language_hidden_states = language_hidden_states + (lang_feats,)
            if cross_encoder_attentions is not None:
                cross_encoder_attentions = cross_encoder_attentions + (x_outputs[2],)

        visual_encoder_outputs = (
            vision_hidden_states,
            vision_attentions if output_attentions else None,
        )
        lang_encoder_outputs = (
            language_hidden_states,
            language_attentions if output_attentions else None,
        )
        return (
            visual_encoder_outputs,
            lang_encoder_outputs,
            cross_encoder_attentions if output_attentions else None,
        )
class LxmertPooler(nn.Module):
    """Pool the sequence by transforming the first ([CLS]) token's hidden state."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.activation = nn.Tanh()

    def forward(self, hidden_states):
        # We "pool" the model by simply taking the hidden state corresponding
        # to the first token.
        first_token_tensor = hidden_states[:, 0]
        return self.activation(self.dense(first_token_tensor))


class LxmertPredictionHeadTransform(nn.Module):
    """Dense + activation + LayerNorm transform shared by the prediction heads."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.transform_act_fn = ACT2FN[config.hidden_act]
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=1e-12)

    def forward(self, hidden_states):
        hidden_states = self.dense(hidden_states)
        hidden_states = self.transform_act_fn(hidden_states)
        return self.LayerNorm(hidden_states)


class LxmertLMPredictionHead(nn.Module):
    """Masked-LM head whose decoder weight is tied to the input word embeddings."""

    def __init__(self, config, lxmert_model_embedding_weights):
        super().__init__()
        self.transform = LxmertPredictionHeadTransform(config)

        # The output weights are the same as the input embeddings, but there is
        # an output-only bias for each token.
        self.decoder = nn.Linear(
            lxmert_model_embedding_weights.size(1),
            lxmert_model_embedding_weights.size(0),
            bias=False,
        )
        self.decoder.weight = lxmert_model_embedding_weights
        self.bias = nn.Parameter(torch.zeros(lxmert_model_embedding_weights.size(0)))

    def forward(self, hidden_states):
        transformed = self.transform(hidden_states)
        return self.decoder(transformed) + self.bias


class LxmertVisualAnswerHead(nn.Module):
    """Two-layer MLP classifier over the pooled output for question answering."""

    def __init__(self, config, num_labels):
        super().__init__()
        hid_dim = config.hidden_size
        self.logit_fc = nn.Sequential(
            nn.Linear(hid_dim, hid_dim * 2),
            GeLU(),
            nn.LayerNorm(hid_dim * 2, eps=1e-12),
            nn.Linear(hid_dim * 2, num_labels),
        )

    def forward(self, hidden_states):
        return self.logit_fc(hidden_states)


class LxmertVisualObjHead(nn.Module):
    """Per-loss decoders for the visual pretraining objectives (obj / attr / feat)."""

    def __init__(self, config):
        super().__init__()
        self.transform = LxmertPredictionHeadTransform(config)
        # Decide the use of visual losses
        visual_losses = {}
        if config.visual_obj_loss:
            visual_losses["obj"] = {"shape": (-1,), "num": config.num_object_labels}
        if config.visual_attr_loss:
            visual_losses["attr"] = {"shape": (-1,), "num": config.num_attr_labels}
        if config.visual_feat_loss:
            visual_losses["feat"] = {
                "shape": (-1, config.visual_feat_dim),
                "num": config.visual_feat_dim,
            }
        self.visual_losses = visual_losses

        # The output weights are the same as the input embeddings, but there is
        # an output-only bias for each token.
        self.decoder_dict = nn.ModuleDict(
            {key: nn.Linear(config.hidden_size, self.visual_losses[key]["num"]) for key in self.visual_losses}
        )

    def forward(self, hidden_states):
        hidden_states = self.transform(hidden_states)
        output = {}
        for key in self.visual_losses:
            output[key] = self.decoder_dict[key](hidden_states)
        return output
class LxmertPreTrainingHeads(nn.Module):
    """Bundles the masked-LM head and the text/image matching head."""

    def __init__(self, config, lxmert_model_embedding_weights):
        super().__init__()
        self.predictions = LxmertLMPredictionHead(config, lxmert_model_embedding_weights)
        self.seq_relationship = nn.Linear(config.hidden_size, 2)

    def forward(self, sequence_output, pooled_output):
        prediction_scores = self.predictions(sequence_output)
        seq_relationship_score = self.seq_relationship(pooled_output)
        return prediction_scores, seq_relationship_score


class LxmertPreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = LxmertConfig
    load_tf_weights = load_tf_weights_in_lxmert
    base_model_prefix = "lxmert"

    def _init_weights(self, module):
        """Initialize the weights"""
        if isinstance(module, nn.Linear):
            # Slightly different from the TF version which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)


LXMERT_START_DOCSTRING = r"""

    The LXMERT model was proposed in [LXMERT: Learning Cross-Modality Encoder Representations from
    Transformers](https://arxiv.org/abs/1908.07490) by Hao Tan and Mohit Bansal. It's a vision and language transformer
    model, pretrained on a variety of multi-modal datasets comprising of GQA, VQAv2.0, MSCOCO captions, and Visual
    genome, using a combination of masked language modeling, region of interest feature regression, cross entropy loss
    for question answering attribute prediction, and object tag prediction.

    This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads
    etc.)

    This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
    and behavior.

    Parameters:
        config ([`LxmertConfig`]): Model configuration class with all the parameters of the model. Initializing with a
            config file does not load the weights associated with the model, only the configuration. Check out the
            [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""

LXMERT_INPUTS_DOCSTRING = r"""

    Args:
        input_ids (`torch.LongTensor` of shape `({0})`):
            Indices of input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        visual_feats (`torch.FloatTensor` of shape `(batch_size, num_visual_features, visual_feat_dim)`):
            This input represents visual features. They ROI pooled object features from bounding boxes using a
            faster-RCNN model)

            These are currently not provided by the transformers library.
        visual_pos (`torch.FloatTensor` of shape `(batch_size, num_visual_features, visual_pos_dim)`):
            This input represents spacial features corresponding to their relative (via index) visual features. The
            pre-trained LXMERT model expects these spacial features to be normalized bounding boxes on a scale of 0 to
            1.

            These are currently not provided by the transformers library.
        attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        visual_attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        token_type_ids (`torch.LongTensor` of shape `({0})`, *optional*):
            Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
            1]`:

            - 0 corresponds to a *sentence A* token,
            - 1 corresponds to a *sentence B* token.

            [What are token type IDs?](../glossary#token-type-ids)
        inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_size)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
            model's internal embedding lookup matrix.
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""
@add_start_docstrings(
    "The bare Lxmert Model transformer outputting raw hidden-states without any specific head on top.",
    LXMERT_START_DOCSTRING,
)
class LxmertModel(LxmertPreTrainedModel):
    """Backbone: text embeddings + cross-modality encoder + [CLS] pooler."""

    def __init__(self, config):
        super().__init__(config)
        self.embeddings = LxmertEmbeddings(config)
        self.encoder = LxmertEncoder(config)
        self.pooler = LxmertPooler(config)
        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.embeddings.word_embeddings

    def set_input_embeddings(self, new_embeddings):
        self.embeddings.word_embeddings = new_embeddings

    @add_start_docstrings_to_model_forward(LXMERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=LxmertModelOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        visual_feats: Optional[torch.FloatTensor] = None,
        visual_pos: Optional[torch.FloatTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        visual_attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[LxmertModelOutput, Tuple[torch.FloatTensor]]:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = input_ids.size()
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")

        if visual_feats is None:
            raise ValueError("`visual_feats` cannot be `None`")
        if visual_pos is None:
            raise ValueError("`visual_pos` cannot be `None`")

        device = input_ids.device if input_ids is not None else inputs_embeds.device

        if attention_mask is None:
            attention_mask = torch.ones(input_shape, device=device)
        if token_type_ids is None:
            token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)

        # We create a 3D attention mask from a 2D tensor mask.
        # Sizes are [batch_size, 1, 1, to_seq_length]
        # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
        # this attention mask is more simple than the triangular masking of causal attention
        # used in OpenAI GPT, we just need to prepare the broadcast dimension here.
        extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)

        # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
        # masked positions, this operation will create a tensor which is 0.0 for
        # positions we want to attend and the dtype's smallest value for masked positions.
        # Since we are adding it to the raw scores before the softmax, this is
        # effectively the same as removing these entirely.
        extended_attention_mask = extended_attention_mask.to(dtype=self.dtype)
        extended_attention_mask = (1.0 - extended_attention_mask) * torch.finfo(self.dtype).min

        # Process the visual attention mask the same way.
        if visual_attention_mask is not None:
            extended_visual_attention_mask = visual_attention_mask.unsqueeze(1).unsqueeze(2)
            extended_visual_attention_mask = extended_visual_attention_mask.to(dtype=self.dtype)
            extended_visual_attention_mask = (1.0 - extended_visual_attention_mask) * torch.finfo(self.dtype).min
        else:
            extended_visual_attention_mask = None

        # Positional Word Embeddings
        embedding_output = self.embeddings(input_ids, token_type_ids, inputs_embeds)

        # Run Lxmert encoder
        encoder_outputs = self.encoder(
            embedding_output,
            extended_attention_mask,
            visual_feats=visual_feats,
            visual_pos=visual_pos,
            visual_attention_mask=extended_visual_attention_mask,
            output_attentions=output_attentions,
        )
        visual_encoder_outputs, lang_encoder_outputs = encoder_outputs[:2]
        vision_hidden_states = visual_encoder_outputs[0]
        language_hidden_states = lang_encoder_outputs[0]

        all_attentions = ()
        if output_attentions:
            language_attentions = lang_encoder_outputs[1]
            vision_attentions = visual_encoder_outputs[1]
            cross_encoder_attentions = encoder_outputs[2]
            all_attentions = (
                language_attentions,
                vision_attentions,
                cross_encoder_attentions,
            )

        hidden_states = (language_hidden_states, vision_hidden_states) if output_hidden_states else ()

        visual_output = vision_hidden_states[-1]
        lang_output = language_hidden_states[-1]
        pooled_output = self.pooler(lang_output)

        if not return_dict:
            return (lang_output, visual_output, pooled_output) + hidden_states + all_attentions

        return LxmertModelOutput(
            pooled_output=pooled_output,
            language_output=lang_output,
            vision_output=visual_output,
            language_hidden_states=language_hidden_states if output_hidden_states else None,
            vision_hidden_states=vision_hidden_states if output_hidden_states else None,
            language_attentions=language_attentions if output_attentions else None,
            vision_attentions=vision_attentions if output_attentions else None,
            cross_encoder_attentions=cross_encoder_attentions if output_attentions else None,
        )
vision_attentions=vision_attentions if output_attentions else None, cross_encoder_attentions=cross_encoder_attentions if output_attentions else None, ) @add_start_docstrings( """Lxmert Model with a specified pretraining head on top.""", LXMERT_START_DOCSTRING, ) class LxmertForPreTraining(LxmertPreTrainedModel): _keys_to_ignore_on_load_missing = ["cls.predictions.decoder.weight"] def __init__(self, config): super().__init__(config) # Configuration self.config = config self.num_qa_labels = config.num_qa_labels self.visual_loss_normalizer = config.visual_loss_normalizer # Use of pretraining tasks self.task_mask_lm = config.task_mask_lm self.task_obj_predict = config.task_obj_predict self.task_matched = config.task_matched self.task_qa = config.task_qa # Lxmert backbone self.lxmert = LxmertModel(config) # Pre-training heads self.cls = LxmertPreTrainingHeads(config, self.lxmert.embeddings.word_embeddings.weight) if self.task_obj_predict: self.obj_predict_head = LxmertVisualObjHead(config) if self.task_qa: self.answer_head = LxmertVisualAnswerHead(config, self.num_qa_labels) # Weight initialization # Initialize weights and apply final processing self.post_init() # Loss functions self.loss_fcts = { "l2": SmoothL1Loss(reduction="none"), "visual_ce": CrossEntropyLoss(reduction="none"), "ce": CrossEntropyLoss(), } visual_losses = {} if config.visual_obj_loss: visual_losses["obj"] = { "shape": (-1,), "num": config.num_object_labels, "loss": "visual_ce", } if config.visual_attr_loss: visual_losses["attr"] = { "shape": (-1,), "num": config.num_attr_labels, "loss": "visual_ce", } if config.visual_feat_loss: visual_losses["feat"] = { "shape": (-1, config.visual_feat_dim), "num": config.visual_feat_dim, "loss": "l2", } self.visual_losses = visual_losses def resize_num_qa_labels(self, num_labels): """ Build a resized question answering linear layer Module from a provided new linear layer. Increasing the size will add newly initialized weights. 
Reducing the size will remove weights from the end Args: num_labels (`int`, *optional*): New number of labels in the linear layer weight matrix. Increasing the size will add newly initialized weights at the end. Reducing the size will remove weights from the end. If not provided or `None`, just returns a pointer to the qa labels ``torch.nn.Linear``` module of the model without doing anything. Return: `torch.nn.Linear`: Pointer to the resized Linear layer or the old Linear layer """ cur_qa_logit_layer = self.get_qa_logit_layer() if num_labels is None or cur_qa_logit_layer is None: return new_qa_logit_layer = self._resize_qa_labels(num_labels) self.config.num_qa_labels = num_labels self.num_qa_labels = num_labels return new_qa_logit_layer def _resize_qa_labels(self, num_labels): cur_qa_logit_layer = self.get_qa_logit_layer() new_qa_logit_layer = self._get_resized_qa_labels(cur_qa_logit_layer, num_labels) self._set_qa_logit_layer(new_qa_logit_layer) return self.get_qa_logit_layer() def get_qa_logit_layer(self) -> nn.Module: """ Returns the linear layer that produces question answering logits. Returns: `nn.Module`: A torch module mapping the question answering prediction hidden states or `None` if LXMERT does not have a visual answering head. 
""" if hasattr(self, "answer_head"): return self.answer_head.logit_fc[-1] def _set_qa_logit_layer(self, qa_logit_layer): self.answer_head.logit_fc[-1] = qa_logit_layer def _get_resized_qa_labels(self, cur_qa_logit_layer, num_labels): if num_labels is None: return cur_qa_logit_layer cur_qa_labels, hidden_dim = cur_qa_logit_layer.weight.size() if cur_qa_labels == num_labels: return cur_qa_logit_layer # Build new linear output if getattr(cur_qa_logit_layer, "bias", None) is not None: new_qa_logit_layer = nn.Linear(hidden_dim, num_labels) else: new_qa_logit_layer = nn.Linear(hidden_dim, num_labels, bias=False) new_qa_logit_layer.to(cur_qa_logit_layer.weight.device) # initialize all new labels self._init_weights(new_qa_logit_layer) # Copy labels from the previous weights num_labels_to_copy = min(cur_qa_labels, num_labels) new_qa_logit_layer.weight.data[:num_labels_to_copy, :] = cur_qa_logit_layer.weight.data[:num_labels_to_copy, :] if getattr(cur_qa_logit_layer, "bias", None) is not None: new_qa_logit_layer.bias.data[:num_labels_to_copy] = cur_qa_logit_layer.bias.data[:num_labels_to_copy] return new_qa_logit_layer @add_start_docstrings_to_model_forward(LXMERT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @replace_return_docstrings(output_type=LxmertForPreTrainingOutput, config_class=_CONFIG_FOR_DOC) def forward( self, input_ids: Optional[torch.LongTensor] = None, visual_feats: Optional[torch.FloatTensor] = None, visual_pos: Optional[torch.FloatTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, visual_attention_mask: Optional[torch.FloatTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, obj_labels: Optional[Dict[str, Tuple[torch.FloatTensor, torch.FloatTensor]]] = None, matched_label: Optional[torch.LongTensor] = None, ans: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = 
None, return_dict: Optional[bool] = None, **kwargs, ) -> Union[LxmertForPreTrainingOutput, Tuple[torch.FloatTensor]]: r""" labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. Indices should be in `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]` obj_labels: (`Dict[Str: Tuple[Torch.FloatTensor, Torch.FloatTensor]]`, *optional*): each key is named after each one of the visual losses and each element of the tuple is of the shape `(batch_size, num_features)` and `(batch_size, num_features, visual_feature_dim)` for each the label id and the label score respectively matched_label (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the whether or not the text input matches the image (classification) loss. Input should be a sequence pair (see `input_ids` docstring) Indices should be in `[0, 1]`: - 0 indicates that the sentence does not match the image, - 1 indicates that the sentence does match the image. 
ans (`Torch.Tensor` of shape `(batch_size)`, *optional*): a one hot representation hof the correct answer *optional* Returns: """ if "masked_lm_labels" in kwargs: warnings.warn( "The `masked_lm_labels` argument is deprecated and will be removed in a future version, use `labels`" " instead.", FutureWarning, ) labels = kwargs.pop("masked_lm_labels") return_dict = return_dict if return_dict is not None else self.config.use_return_dict device = input_ids.device if input_ids is not None else inputs_embeds.device lxmert_output = self.lxmert( input_ids=input_ids, visual_feats=visual_feats, visual_pos=visual_pos, token_type_ids=token_type_ids, attention_mask=attention_mask, visual_attention_mask=visual_attention_mask, inputs_embeds=inputs_embeds, output_hidden_states=output_hidden_states, output_attentions=output_attentions, return_dict=return_dict, ) lang_output, visual_output, pooled_output = ( lxmert_output[0], lxmert_output[1], lxmert_output[2], ) lang_prediction_scores, cross_relationship_score = self.cls(lang_output, pooled_output) if self.task_qa: answer_score = self.answer_head(pooled_output) else: answer_score = pooled_output[0][0] total_loss = ( None if (labels is None and matched_label is None and obj_labels is None and ans is None) else torch.tensor(0.0, device=device) ) if labels is not None and self.task_mask_lm: masked_lm_loss = self.loss_fcts["ce"]( lang_prediction_scores.view(-1, self.config.vocab_size), labels.view(-1), ) total_loss += masked_lm_loss if matched_label is not None and self.task_matched: matched_loss = self.loss_fcts["ce"](cross_relationship_score.view(-1, 2), matched_label.view(-1)) total_loss += matched_loss if obj_labels is not None and self.task_obj_predict: total_visual_loss = torch.tensor(0.0, device=input_ids.device) visual_prediction_scores_dict = self.obj_predict_head(visual_output) for key, key_info in self.visual_losses.items(): label, mask_conf = obj_labels[key] output_dim = key_info["num"] loss_fct_name = key_info["loss"] 
label_shape = key_info["shape"] weight = self.visual_loss_normalizer visual_loss_fct = self.loss_fcts[loss_fct_name] visual_prediction_scores = visual_prediction_scores_dict[key] visual_loss = visual_loss_fct( visual_prediction_scores.view(-1, output_dim), label.view(label_shape), ) if visual_loss.dim() > 1: # Regression Losses visual_loss = visual_loss.mean(1) visual_loss = (visual_loss * mask_conf.view(-1)).mean() * weight total_visual_loss += visual_loss total_loss += total_visual_loss if ans is not None and self.task_qa: answer_loss = self.loss_fcts["ce"](answer_score.view(-1, self.num_qa_labels), ans.view(-1)) total_loss += answer_loss if not return_dict: output = ( lang_prediction_scores, cross_relationship_score, answer_score, ) + lxmert_output[3:] return ((total_loss,) + output) if total_loss is not None else output return LxmertForPreTrainingOutput( loss=total_loss, prediction_logits=lang_prediction_scores, cross_relationship_score=cross_relationship_score, question_answering_score=answer_score, language_hidden_states=lxmert_output.language_hidden_states, vision_hidden_states=lxmert_output.vision_hidden_states, language_attentions=lxmert_output.language_attentions, vision_attentions=lxmert_output.vision_attentions, cross_encoder_attentions=lxmert_output.cross_encoder_attentions, ) @add_start_docstrings( """Lxmert Model with a visual-answering head on top for downstream QA tasks""", LXMERT_START_DOCSTRING, ) class LxmertForQuestionAnswering(LxmertPreTrainedModel): def __init__(self, config): super().__init__(config) # Configuration self.config = config self.num_qa_labels = config.num_qa_labels self.visual_loss_normalizer = config.visual_loss_normalizer # Lxmert backbone self.lxmert = LxmertModel(config) self.answer_head = LxmertVisualAnswerHead(config, self.num_qa_labels) # Weight initialization # Initialize weights and apply final processing self.post_init() # Loss function self.loss = CrossEntropyLoss() def resize_num_qa_labels(self, num_labels): """ 
Build a resized question answering linear layer Module from a provided new linear layer. Increasing the size will add newly initialized weights. Reducing the size will remove weights from the end Args: num_labels (`int`, *optional*): New number of labels in the linear layer weight matrix. Increasing the size will add newly initialized weights at the end. Reducing the size will remove weights from the end. If not provided or `None`, just returns a pointer to the qa labels ``torch.nn.Linear``` module of the model without doing anything. Return: `torch.nn.Linear`: Pointer to the resized Linear layer or the old Linear layer """ cur_qa_logit_layer = self.get_qa_logit_layer() if num_labels is None or cur_qa_logit_layer is None: return new_qa_logit_layer = self._resize_qa_labels(num_labels) self.config.num_qa_labels = num_labels self.num_qa_labels = num_labels return new_qa_logit_layer def _resize_qa_labels(self, num_labels): cur_qa_logit_layer = self.get_qa_logit_layer() new_qa_logit_layer = self._get_resized_qa_labels(cur_qa_logit_layer, num_labels) self._set_qa_logit_layer(new_qa_logit_layer) return self.get_qa_logit_layer() def get_qa_logit_layer(self) -> nn.Module: """ Returns the linear layer that produces question answering logits Returns: `nn.Module`: A torch module mapping the question answering prediction hidden states. `None`: A NoneType object if Lxmert does not have the visual answering head. 
""" if hasattr(self, "answer_head"): return self.answer_head.logit_fc[-1] def _set_qa_logit_layer(self, qa_logit_layer): self.answer_head.logit_fc[-1] = qa_logit_layer def _get_resized_qa_labels(self, cur_qa_logit_layer, num_labels): if num_labels is None: return cur_qa_logit_layer cur_qa_labels, hidden_dim = cur_qa_logit_layer.weight.size() if cur_qa_labels == num_labels: return cur_qa_logit_layer # Build new linear output if getattr(cur_qa_logit_layer, "bias", None) is not None: new_qa_logit_layer = nn.Linear(hidden_dim, num_labels) else: new_qa_logit_layer = nn.Linear(hidden_dim, num_labels, bias=False) new_qa_logit_layer.to(cur_qa_logit_layer.weight.device) # initialize all new labels self._init_weights(new_qa_logit_layer) # Copy labels from the previous weights num_labels_to_copy = min(cur_qa_labels, num_labels) new_qa_logit_layer.weight.data[:num_labels_to_copy, :] = cur_qa_logit_layer.weight.data[:num_labels_to_copy, :] if getattr(cur_qa_logit_layer, "bias", None) is not None: new_qa_logit_layer.bias.data[:num_labels_to_copy] = cur_qa_logit_layer.bias.data[:num_labels_to_copy] return new_qa_logit_layer @add_start_docstrings_to_model_forward(LXMERT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=LxmertForQuestionAnsweringOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, visual_feats: Optional[torch.FloatTensor] = None, visual_pos: Optional[torch.FloatTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, visual_attention_mask: Optional[torch.FloatTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[LxmertForQuestionAnsweringOutput, Tuple[torch.FloatTensor]]: r""" labels: 
(`Torch.Tensor` of shape `(batch_size)`, *optional*): A one-hot representation of the correct answer """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict lxmert_output = self.lxmert( input_ids=input_ids, visual_feats=visual_feats, visual_pos=visual_pos, token_type_ids=token_type_ids, attention_mask=attention_mask, visual_attention_mask=visual_attention_mask, inputs_embeds=inputs_embeds, output_hidden_states=output_hidden_states, output_attentions=output_attentions, return_dict=return_dict, ) pooled_output = lxmert_output[2] answer_score = self.answer_head(pooled_output) loss = None if labels is not None: loss = self.loss(answer_score.view(-1, self.num_qa_labels), labels.view(-1)) if not return_dict: output = (answer_score,) + lxmert_output[3:] return (loss,) + output if loss is not None else output return LxmertForQuestionAnsweringOutput( loss=loss, question_answering_score=answer_score, language_hidden_states=lxmert_output.language_hidden_states, vision_hidden_states=lxmert_output.vision_hidden_states, language_attentions=lxmert_output.language_attentions, vision_attentions=lxmert_output.vision_attentions, cross_encoder_attentions=lxmert_output.cross_encoder_attentions, )
27182812/ChatGLM-LLaMA-chinese-insturct
20,859
src/transformers/models/lxmert/tokenization_lxmert.py
# coding=utf-8 # Copyright 2020 The Google AI Team, Stanford University and The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import collections import os import unicodedata from typing import List, Optional, Tuple from ...tokenization_utils import PreTrainedTokenizer, _is_control, _is_punctuation, _is_whitespace from ...utils import logging logger = logging.get_logger(__name__) VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "unc-nlp/lxmert-base-uncased": "https://huggingface.co/unc-nlp/lxmert-base-uncased/resolve/main/vocab.txt", } } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "unc-nlp/lxmert-base-uncased": 512, } PRETRAINED_INIT_CONFIGURATION = { "unc-nlp/lxmert-base-uncased": {"do_lower_case": True}, } # Copied from transformers.models.bert.tokenization_bert.load_vocab def load_vocab(vocab_file): """Loads a vocabulary file into a dictionary.""" vocab = collections.OrderedDict() with open(vocab_file, "r", encoding="utf-8") as reader: tokens = reader.readlines() for index, token in enumerate(tokens): token = token.rstrip("\n") vocab[token] = index return vocab # Copied from transformers.models.bert.tokenization_bert.whitespace_tokenize def whitespace_tokenize(text): """Runs basic whitespace cleaning and splitting on a piece of text.""" text = text.strip() if not text: return [] tokens = text.split() return tokens # Copied from transformers.models.bert.tokenization_bert.BertTokenizer with 
bert-base-cased->unc-nlp/lxmert-base-uncased, BERT->Lxmert, BertTokenizer->LxmertTokenizer class LxmertTokenizer(PreTrainedTokenizer): r""" Construct a Lxmert tokenizer. Based on WordPiece. This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: vocab_file (`str`): File containing the vocabulary. do_lower_case (`bool`, *optional*, defaults to `True`): Whether or not to lowercase the input when tokenizing. do_basic_tokenize (`bool`, *optional*, defaults to `True`): Whether or not to do basic tokenization before WordPiece. never_split (`Iterable`, *optional*): Collection of tokens which will never be split during tokenization. Only has an effect when `do_basic_tokenize=True` unk_token (`str`, *optional*, defaults to `"[UNK]"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. sep_token (`str`, *optional*, defaults to `"[SEP]"`): The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for sequence classification or for a text and a question for question answering. It is also used as the last token of a sequence built with special tokens. pad_token (`str`, *optional*, defaults to `"[PAD]"`): The token used for padding, for example when batching sequences of different lengths. cls_token (`str`, *optional*, defaults to `"[CLS]"`): The classifier token which is used when doing sequence classification (classification of the whole sequence instead of per-token classification). It is the first token of the sequence when built with special tokens. mask_token (`str`, *optional*, defaults to `"[MASK]"`): The token used for masking values. This is the token used when training this model with masked language modeling. This is the token which the model will try to predict. 
tokenize_chinese_chars (`bool`, *optional*, defaults to `True`): Whether or not to tokenize Chinese characters. This should likely be deactivated for Japanese (see this [issue](https://github.com/huggingface/transformers/issues/328)). strip_accents (`bool`, *optional*): Whether or not to strip all accents. If this option is not specified, then it will be determined by the value for `lowercase` (as in the original Lxmert). """ vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES def __init__( self, vocab_file, do_lower_case=True, do_basic_tokenize=True, never_split=None, unk_token="[UNK]", sep_token="[SEP]", pad_token="[PAD]", cls_token="[CLS]", mask_token="[MASK]", tokenize_chinese_chars=True, strip_accents=None, **kwargs, ): super().__init__( do_lower_case=do_lower_case, do_basic_tokenize=do_basic_tokenize, never_split=never_split, unk_token=unk_token, sep_token=sep_token, pad_token=pad_token, cls_token=cls_token, mask_token=mask_token, tokenize_chinese_chars=tokenize_chinese_chars, strip_accents=strip_accents, **kwargs, ) if not os.path.isfile(vocab_file): raise ValueError( f"Can't find a vocabulary file at path '{vocab_file}'. 
To load the vocabulary from a Google pretrained" " model use `tokenizer = LxmertTokenizer.from_pretrained(PRETRAINED_MODEL_NAME)`" ) self.vocab = load_vocab(vocab_file) self.ids_to_tokens = collections.OrderedDict([(ids, tok) for tok, ids in self.vocab.items()]) self.do_basic_tokenize = do_basic_tokenize if do_basic_tokenize: self.basic_tokenizer = BasicTokenizer( do_lower_case=do_lower_case, never_split=never_split, tokenize_chinese_chars=tokenize_chinese_chars, strip_accents=strip_accents, ) self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab, unk_token=self.unk_token) @property def do_lower_case(self): return self.basic_tokenizer.do_lower_case @property def vocab_size(self): return len(self.vocab) def get_vocab(self): return dict(self.vocab, **self.added_tokens_encoder) def _tokenize(self, text): split_tokens = [] if self.do_basic_tokenize: for token in self.basic_tokenizer.tokenize(text, never_split=self.all_special_tokens): # If the token is part of the never_split set if token in self.basic_tokenizer.never_split: split_tokens.append(token) else: split_tokens += self.wordpiece_tokenizer.tokenize(token) else: split_tokens = self.wordpiece_tokenizer.tokenize(text) return split_tokens def _convert_token_to_id(self, token): """Converts a token (str) in an id using the vocab.""" return self.vocab.get(token, self.vocab.get(self.unk_token)) def _convert_id_to_token(self, index): """Converts an index (integer) in a token (str) using the vocab.""" return self.ids_to_tokens.get(index, self.unk_token) def convert_tokens_to_string(self, tokens): """Converts a sequence of tokens (string) in a single string.""" out_string = " ".join(tokens).replace(" ##", "").strip() return out_string def build_inputs_with_special_tokens( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. 
A Lxmert sequence has the following format: - single sequence: `[CLS] X [SEP]` - pair of sequences: `[CLS] A [SEP] B [SEP]` Args: token_ids_0 (`List[int]`): List of IDs to which the special tokens will be added. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. """ if token_ids_1 is None: return [self.cls_token_id] + token_ids_0 + [self.sep_token_id] cls = [self.cls_token_id] sep = [self.sep_token_id] return cls + token_ids_0 + sep + token_ids_1 + sep def get_special_tokens_mask( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False ) -> List[int]: """ Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer `prepare_for_model` method. Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. already_has_special_tokens (`bool`, *optional*, defaults to `False`): Whether or not the token list is already formatted with special tokens for the model. Returns: `List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token. """ if already_has_special_tokens: return super().get_special_tokens_mask( token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True ) if token_ids_1 is not None: return [1] + ([0] * len(token_ids_0)) + [1] + ([0] * len(token_ids_1)) + [1] return [1] + ([0] * len(token_ids_0)) + [1] def create_token_type_ids_from_sequences( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Create a mask from the two sequences passed to be used in a sequence-pair classification task. 
A Lxmert sequence pair mask has the following format: ``` 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 | first sequence | second sequence | ``` If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s). Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s). """ sep = [self.sep_token_id] cls = [self.cls_token_id] if token_ids_1 is None: return len(cls + token_ids_0 + sep) * [0] return len(cls + token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1] def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: index = 0 if os.path.isdir(save_directory): vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) else: vocab_file = (filename_prefix + "-" if filename_prefix else "") + save_directory with open(vocab_file, "w", encoding="utf-8") as writer: for token, token_index in sorted(self.vocab.items(), key=lambda kv: kv[1]): if index != token_index: logger.warning( f"Saving vocabulary to {vocab_file}: vocabulary indices are not consecutive." " Please check that the vocabulary is not corrupted!" ) index = token_index writer.write(token + "\n") index += 1 return (vocab_file,) # Copied from transformers.models.bert.tokenization_bert.BasicTokenizer class BasicTokenizer(object): """ Constructs a BasicTokenizer that will run basic tokenization (punctuation splitting, lower casing, etc.). Args: do_lower_case (`bool`, *optional*, defaults to `True`): Whether or not to lowercase the input when tokenizing. never_split (`Iterable`, *optional*): Collection of tokens which will never be split during tokenization. 
Only has an effect when `do_basic_tokenize=True` tokenize_chinese_chars (`bool`, *optional*, defaults to `True`): Whether or not to tokenize Chinese characters. This should likely be deactivated for Japanese (see this [issue](https://github.com/huggingface/transformers/issues/328)). strip_accents (`bool`, *optional*): Whether or not to strip all accents. If this option is not specified, then it will be determined by the value for `lowercase` (as in the original BERT). """ def __init__(self, do_lower_case=True, never_split=None, tokenize_chinese_chars=True, strip_accents=None): if never_split is None: never_split = [] self.do_lower_case = do_lower_case self.never_split = set(never_split) self.tokenize_chinese_chars = tokenize_chinese_chars self.strip_accents = strip_accents def tokenize(self, text, never_split=None): """ Basic Tokenization of a piece of text. Split on "white spaces" only, for sub-word tokenization, see WordPieceTokenizer. Args: never_split (`List[str]`, *optional*) Kept for backward compatibility purposes. Now implemented directly at the base class level (see [`PreTrainedTokenizer.tokenize`]) List of token not to split. """ # union() returns a new set by concatenating the two sets. never_split = self.never_split.union(set(never_split)) if never_split else self.never_split text = self._clean_text(text) # This was added on November 1st, 2018 for the multilingual and Chinese # models. This is also applied to the English models now, but it doesn't # matter since the English models were not trained on any Chinese data # and generally don't have any Chinese data in them (there are Chinese # characters in the vocabulary because Wikipedia does have some Chinese # words in the English Wikipedia.). 
if self.tokenize_chinese_chars: text = self._tokenize_chinese_chars(text) orig_tokens = whitespace_tokenize(text) split_tokens = [] for token in orig_tokens: if token not in never_split: if self.do_lower_case: token = token.lower() if self.strip_accents is not False: token = self._run_strip_accents(token) elif self.strip_accents: token = self._run_strip_accents(token) split_tokens.extend(self._run_split_on_punc(token, never_split)) output_tokens = whitespace_tokenize(" ".join(split_tokens)) return output_tokens def _run_strip_accents(self, text): """Strips accents from a piece of text.""" text = unicodedata.normalize("NFD", text) output = [] for char in text: cat = unicodedata.category(char) if cat == "Mn": continue output.append(char) return "".join(output) def _run_split_on_punc(self, text, never_split=None): """Splits punctuation on a piece of text.""" if never_split is not None and text in never_split: return [text] chars = list(text) i = 0 start_new_word = True output = [] while i < len(chars): char = chars[i] if _is_punctuation(char): output.append([char]) start_new_word = True else: if start_new_word: output.append([]) start_new_word = False output[-1].append(char) i += 1 return ["".join(x) for x in output] def _tokenize_chinese_chars(self, text): """Adds whitespace around any CJK character.""" output = [] for char in text: cp = ord(char) if self._is_chinese_char(cp): output.append(" ") output.append(char) output.append(" ") else: output.append(char) return "".join(output) def _is_chinese_char(self, cp): """Checks whether CP is the codepoint of a CJK character.""" # This defines a "chinese character" as anything in the CJK Unicode block: # https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) # # Note that the CJK Unicode block is NOT all Japanese and Korean characters, # despite its name. The modern Korean Hangul alphabet is a different block, # as is Japanese Hiragana and Katakana. 
Those alphabets are used to write # space-separated words, so they are not treated specially and handled # like the all of the other languages. if ( (cp >= 0x4E00 and cp <= 0x9FFF) or (cp >= 0x3400 and cp <= 0x4DBF) # or (cp >= 0x20000 and cp <= 0x2A6DF) # or (cp >= 0x2A700 and cp <= 0x2B73F) # or (cp >= 0x2B740 and cp <= 0x2B81F) # or (cp >= 0x2B820 and cp <= 0x2CEAF) # or (cp >= 0xF900 and cp <= 0xFAFF) or (cp >= 0x2F800 and cp <= 0x2FA1F) # ): # return True return False def _clean_text(self, text): """Performs invalid character removal and whitespace cleanup on text.""" output = [] for char in text: cp = ord(char) if cp == 0 or cp == 0xFFFD or _is_control(char): continue if _is_whitespace(char): output.append(" ") else: output.append(char) return "".join(output) # Copied from transformers.models.bert.tokenization_bert.WordpieceTokenizer class WordpieceTokenizer(object): """Runs WordPiece tokenization.""" def __init__(self, vocab, unk_token, max_input_chars_per_word=100): self.vocab = vocab self.unk_token = unk_token self.max_input_chars_per_word = max_input_chars_per_word def tokenize(self, text): """ Tokenizes a piece of text into its word pieces. This uses a greedy longest-match-first algorithm to perform tokenization using the given vocabulary. For example, `input = "unaffable"` wil return as output `["un", "##aff", "##able"]`. Args: text: A single token or whitespace separated tokens. This should have already been passed through *BasicTokenizer*. Returns: A list of wordpiece tokens. 
""" output_tokens = [] for token in whitespace_tokenize(text): chars = list(token) if len(chars) > self.max_input_chars_per_word: output_tokens.append(self.unk_token) continue is_bad = False start = 0 sub_tokens = [] while start < len(chars): end = len(chars) cur_substr = None while start < end: substr = "".join(chars[start:end]) if start > 0: substr = "##" + substr if substr in self.vocab: cur_substr = substr break end -= 1 if cur_substr is None: is_bad = True break sub_tokens.append(cur_substr) start = end if is_bad: output_tokens.append(self.unk_token) else: output_tokens.extend(sub_tokens) return output_tokens
27182812/ChatGLM-LLaMA-chinese-insturct
2,109
src/transformers/models/lxmert/convert_lxmert_original_tf_checkpoint_to_pytorch.py
# coding=utf-8 # Copyright 2018 The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Convert LXMERT checkpoint.""" import argparse import torch from transformers import LxmertConfig, LxmertForPreTraining, load_tf_weights_in_lxmert from transformers.utils import logging logging.set_verbosity_info() def convert_tf_checkpoint_to_pytorch(tf_checkpoint_path, config_file, pytorch_dump_path): # Initialise PyTorch model config = LxmertConfig.from_json_file(config_file) print(f"Building PyTorch model from configuration: {config}") model = LxmertForPreTraining(config) # Load weights from tf checkpoint load_tf_weights_in_lxmert(model, config, tf_checkpoint_path) # Save pytorch-model print(f"Save PyTorch model to {pytorch_dump_path}") torch.save(model.state_dict(), pytorch_dump_path) if __name__ == "__main__": parser = argparse.ArgumentParser() # Required parameters parser.add_argument( "--tf_checkpoint_path", default=None, type=str, required=True, help="Path to the TensorFlow checkpoint path." ) parser.add_argument( "--config_file", default=None, type=str, required=True, help="The config json file corresponding to the pre-trained model. \nThis specifies the model architecture.", ) parser.add_argument( "--pytorch_dump_path", default=None, type=str, required=True, help="Path to the output PyTorch model." ) args = parser.parse_args() convert_tf_checkpoint_to_pytorch(args.tf_checkpoint_path, args.config_file, args.pytorch_dump_path)
27182812/ChatGLM-LLaMA-chinese-insturct
1,848
src/transformers/models/barthez/__init__.py
# Copyright 2020 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_sentencepiece_available, is_tokenizers_available _import_structure = {} try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["tokenization_barthez"] = ["BarthezTokenizer"] try: if not is_tokenizers_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["tokenization_barthez_fast"] = ["BarthezTokenizerFast"] if TYPE_CHECKING: try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_barthez import BarthezTokenizer try: if not is_tokenizers_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_barthez_fast import BarthezTokenizerFast else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
8,898
src/transformers/models/barthez/tokenization_barthez_fast.py
# coding=utf-8 # Copyright 2020 Ecole Polytechnique and the HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License """ Tokenization classes for the BARThez model.""" import os from shutil import copyfile from typing import List, Optional, Tuple from ...tokenization_utils import AddedToken from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import is_sentencepiece_available, logging if is_sentencepiece_available(): from .tokenization_barthez import BarthezTokenizer else: BarthezTokenizer = None logger = logging.get_logger(__name__) VOCAB_FILES_NAMES = {"vocab_file": "sentencepiece.bpe.model", "tokenizer_file": "tokenizer.json"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "moussaKam/mbarthez": "https://huggingface.co/moussaKam/mbarthez/resolve/main/sentencepiece.bpe.model", "moussaKam/barthez": "https://huggingface.co/moussaKam/barthez/resolve/main/sentencepiece.bpe.model", "moussaKam/barthez-orangesum-title": ( "https://huggingface.co/moussaKam/barthez-orangesum-title/resolve/main/sentencepiece.bpe.model" ), }, "tokenizer_file": { "moussaKam/mbarthez": "https://huggingface.co/moussaKam/mbarthez/resolve/main/tokenizer.json", "moussaKam/barthez": "https://huggingface.co/moussaKam/barthez/resolve/main/tokenizer.json", "moussaKam/barthez-orangesum-title": ( "https://huggingface.co/moussaKam/barthez-orangesum-title/resolve/main/tokenizer.json" ), }, } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "moussaKam/mbarthez": 1024, "moussaKam/barthez": 
1024, "moussaKam/barthez-orangesum-title": 1024, } SPIECE_UNDERLINE = "▁" class BarthezTokenizerFast(PreTrainedTokenizerFast): """ Adapted from [`CamembertTokenizer`] and [`BartTokenizer`]. Construct a "fast" BARThez tokenizer. Based on [SentencePiece](https://github.com/google/sentencepiece). This tokenizer inherits from [`PreTrainedTokenizerFast`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: vocab_file (`str`): [SentencePiece](https://github.com/google/sentencepiece) file (generally has a *.spm* extension) that contains the vocabulary necessary to instantiate a tokenizer. bos_token (`str`, *optional*, defaults to `"<s>"`): The beginning of sequence token that was used during pretraining. Can be used a sequence classifier token. <Tip> When building a sequence using special tokens, this is not the token that is used for the beginning of sequence. The token used is the `cls_token`. </Tip> eos_token (`str`, *optional*, defaults to `"</s>"`): The end of sequence token. <Tip> When building a sequence using special tokens, this is not the token that is used for the end of sequence. The token used is the `sep_token`. </Tip> sep_token (`str`, *optional*, defaults to `"</s>"`): The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for sequence classification or for a text and a question for question answering. It is also used as the last token of a sequence built with special tokens. cls_token (`str`, *optional*, defaults to `"<s>"`): The classifier token which is used when doing sequence classification (classification of the whole sequence instead of per-token classification). It is the first token of the sequence when built with special tokens. unk_token (`str`, *optional*, defaults to `"<unk>"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. 
pad_token (`str`, *optional*, defaults to `"<pad>"`): The token used for padding, for example when batching sequences of different lengths. mask_token (`str`, *optional*, defaults to `"<mask>"`): The token used for masking values. This is the token used when training this model with masked language modeling. This is the token which the model will try to predict. additional_special_tokens (`List[str]`, *optional*, defaults to `["<s>NOTUSED", "</s>NOTUSED"]`): Additional special tokens used by the tokenizer. """ vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES model_input_names = ["input_ids", "attention_mask"] slow_tokenizer_class = BarthezTokenizer def __init__( self, vocab_file=None, tokenizer_file=None, bos_token="<s>", eos_token="</s>", sep_token="</s>", cls_token="<s>", unk_token="<unk>", pad_token="<pad>", mask_token="<mask>", **kwargs, ): # Mask token behave like a normal word, i.e. include the space before it mask_token = AddedToken(mask_token, lstrip=True, rstrip=False) if isinstance(mask_token, str) else mask_token super().__init__( vocab_file, tokenizer_file=tokenizer_file, bos_token=bos_token, eos_token=eos_token, unk_token=unk_token, sep_token=sep_token, cls_token=cls_token, pad_token=pad_token, mask_token=mask_token, **kwargs, ) self.vocab_file = vocab_file self.can_save_slow_tokenizer = False if not self.vocab_file else True def build_inputs_with_special_tokens( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. A BARThez sequence has the following format: - single sequence: `<s> X </s>` - pair of sequences: `<s> A </s></s> B </s>` Args: token_ids_0 (`List[int]`): List of IDs to which the special tokens will be added. 
token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. """ if token_ids_1 is None: return [self.cls_token_id] + token_ids_0 + [self.sep_token_id] cls = [self.cls_token_id] sep = [self.sep_token_id] return cls + token_ids_0 + sep + sep + token_ids_1 + sep def create_token_type_ids_from_sequences( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Create a mask from the two sequences passed to be used in a sequence-pair classification task. Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of zeros. """ sep = [self.sep_token_id] cls = [self.cls_token_id] if token_ids_1 is None: return len(cls + token_ids_0 + sep) * [0] return len(cls + token_ids_0 + sep + sep + token_ids_1 + sep) * [0] def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: if not self.can_save_slow_tokenizer: raise ValueError( "Your fast tokenizer does not have the necessary information to save the vocabulary for a slow " "tokenizer." ) if not os.path.isdir(save_directory): logger.error(f"Vocabulary path ({save_directory}) should be a directory") return out_vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file): copyfile(self.vocab_file, out_vocab_file) return (out_vocab_file,)
27182812/ChatGLM-LLaMA-chinese-insturct
13,191
src/transformers/models/barthez/tokenization_barthez.py
# coding=utf-8 # Copyright 2020 Ecole Polytechnique and the HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License """ Tokenization classes for the BARThez model.""" import os from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging logger = logging.get_logger(__name__) VOCAB_FILES_NAMES = {"vocab_file": "sentencepiece.bpe.model"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "moussaKam/mbarthez": "https://huggingface.co/moussaKam/mbarthez/resolve/main/sentencepiece.bpe.model", "moussaKam/barthez": "https://huggingface.co/moussaKam/barthez/resolve/main/sentencepiece.bpe.model", "moussaKam/barthez-orangesum-title": ( "https://huggingface.co/moussaKam/barthez-orangesum-title/resolve/main/sentencepiece.bpe.model" ), }, } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "moussaKam/mbarthez": 1024, "moussaKam/barthez": 1024, "moussaKam/barthez-orangesum-title": 1024, } SPIECE_UNDERLINE = "▁" class BarthezTokenizer(PreTrainedTokenizer): """ Adapted from [`CamembertTokenizer`] and [`BartTokenizer`]. Construct a BARThez tokenizer. Based on [SentencePiece](https://github.com/google/sentencepiece). This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. 
Args: vocab_file (`str`): [SentencePiece](https://github.com/google/sentencepiece) file (generally has a *.spm* extension) that contains the vocabulary necessary to instantiate a tokenizer. bos_token (`str`, *optional*, defaults to `"<s>"`): The beginning of sequence token that was used during pretraining. Can be used a sequence classifier token. <Tip> When building a sequence using special tokens, this is not the token that is used for the beginning of sequence. The token used is the `cls_token`. </Tip> eos_token (`str`, *optional*, defaults to `"</s>"`): The end of sequence token. <Tip> When building a sequence using special tokens, this is not the token that is used for the end of sequence. The token used is the `sep_token`. </Tip> sep_token (`str`, *optional*, defaults to `"</s>"`): The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for sequence classification or for a text and a question for question answering. It is also used as the last token of a sequence built with special tokens. cls_token (`str`, *optional*, defaults to `"<s>"`): The classifier token which is used when doing sequence classification (classification of the whole sequence instead of per-token classification). It is the first token of the sequence when built with special tokens. unk_token (`str`, *optional*, defaults to `"<unk>"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. pad_token (`str`, *optional*, defaults to `"<pad>"`): The token used for padding, for example when batching sequences of different lengths. mask_token (`str`, *optional*, defaults to `"<mask>"`): The token used for masking values. This is the token used when training this model with masked language modeling. This is the token which the model will try to predict. 
additional_special_tokens (`List[str]`, *optional*, defaults to `["<s>NOTUSED", "</s>NOTUSED"]`): Additional special tokens used by the tokenizer. sp_model_kwargs (`dict`, *optional*): Will be passed to the `SentencePieceProcessor.__init__()` method. The [Python wrapper for SentencePiece](https://github.com/google/sentencepiece/tree/master/python) can be used, among other things, to set: - `enable_sampling`: Enable subword regularization. - `nbest_size`: Sampling parameters for unigram. Invalid for BPE-Dropout. - `nbest_size = {0,1}`: No sampling is performed. - `nbest_size > 1`: samples from the nbest_size results. - `nbest_size < 0`: assuming that nbest_size is infinite and samples from the all hypothesis (lattice) using forward-filtering-and-backward-sampling algorithm. - `alpha`: Smoothing parameter for unigram sampling, and dropout probability of merge operations for BPE-dropout. Attributes: sp_model (`SentencePieceProcessor`): The *SentencePiece* processor that is used for every conversion (string, tokens and IDs). """ vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES model_input_names = ["input_ids", "attention_mask"] def __init__( self, vocab_file, bos_token="<s>", eos_token="</s>", sep_token="</s>", cls_token="<s>", unk_token="<unk>", pad_token="<pad>", mask_token="<mask>", sp_model_kwargs: Optional[Dict[str, Any]] = None, **kwargs, ) -> None: # Mask token behave like a normal word, i.e. 
include the space before it mask_token = AddedToken(mask_token, lstrip=True, rstrip=False) if isinstance(mask_token, str) else mask_token self.sp_model_kwargs = {} if sp_model_kwargs is None else sp_model_kwargs super().__init__( bos_token=bos_token, eos_token=eos_token, unk_token=unk_token, sep_token=sep_token, cls_token=cls_token, pad_token=pad_token, mask_token=mask_token, sp_model_kwargs=self.sp_model_kwargs, **kwargs, ) self.vocab_file = vocab_file self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs) self.sp_model.Load(str(vocab_file)) self.fairseq_tokens_to_ids = {"<s>": 0, "<pad>": 1, "</s>": 2, "<unk>": 3} self.fairseq_tokens_to_ids["<mask>"] = len(self.sp_model) - 1 self.fairseq_ids_to_tokens = {v: k for k, v in self.fairseq_tokens_to_ids.items()} def build_inputs_with_special_tokens( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. A BARThez sequence has the following format: - single sequence: `<s> X </s>` - pair of sequences: `<s> A </s></s> B </s>` Args: token_ids_0 (`List[int]`): List of IDs to which the special tokens will be added. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. """ if token_ids_1 is None: return [self.cls_token_id] + token_ids_0 + [self.sep_token_id] cls = [self.cls_token_id] sep = [self.sep_token_id] return cls + token_ids_0 + sep + sep + token_ids_1 + sep def get_special_tokens_mask( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False ) -> List[int]: """ Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer `prepare_for_model` method. 
Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. already_has_special_tokens (`bool`, *optional*, defaults to `False`): Whether or not the token list is already formatted with special tokens for the model. Returns: `List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token. """ if already_has_special_tokens: return super().get_special_tokens_mask( token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True ) if token_ids_1 is None: return [1] + ([0] * len(token_ids_0)) + [1] return [1] + ([0] * len(token_ids_0)) + [1, 1] + ([0] * len(token_ids_1)) + [1] def create_token_type_ids_from_sequences( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Create a mask from the two sequences passed to be used in a sequence-pair classification task. Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of zeros. 
""" sep = [self.sep_token_id] cls = [self.cls_token_id] if token_ids_1 is None: return len(cls + token_ids_0 + sep) * [0] return len(cls + token_ids_0 + sep + sep + token_ids_1 + sep) * [0] @property def vocab_size(self): return len(self.sp_model) def get_vocab(self): vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)} vocab.update(self.added_tokens_encoder) return vocab def _tokenize(self, text: str) -> List[str]: return self.sp_model.encode(text, out_type=str) def _convert_token_to_id(self, token): """Converts a token (str) in an id using the vocab.""" if token in self.fairseq_tokens_to_ids: return self.fairseq_tokens_to_ids[token] spm_id = self.sp_model.PieceToId(token) return spm_id if spm_id else self.unk_token_id def _convert_id_to_token(self, index): """Converts an index (integer) in a token (str) using the vocab.""" if index in self.fairseq_ids_to_tokens: return self.fairseq_ids_to_tokens[index] return self.sp_model.IdToPiece(index) def convert_tokens_to_string(self, tokens): """Converts a sequence of tokens (string) in a single string.""" current_sub_tokens = [] out_string = "" prev_is_special = False for token in tokens: # make sure that special tokens are not decoded using sentencepiece model if token in self.all_special_tokens: if not prev_is_special: out_string += " " out_string += self.sp_model.decode(current_sub_tokens) + token prev_is_special = True current_sub_tokens = [] else: current_sub_tokens.append(token) prev_is_special = False out_string += self.sp_model.decode(current_sub_tokens) return out_string.strip() def __getstate__(self): state = self.__dict__.copy() state["sp_model"] = None return state def __setstate__(self, d): self.__dict__ = d # for backward compatibility if not hasattr(self, "sp_model_kwargs"): self.sp_model_kwargs = {} self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs) self.sp_model.Load(self.vocab_file) def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) 
-> Tuple[str]: if not os.path.isdir(save_directory): logger.error(f"Vocabulary path ({save_directory}) should be a directory") return out_vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file) and os.path.isfile(self.vocab_file): copyfile(self.vocab_file, out_vocab_file) elif not os.path.isfile(self.vocab_file): with open(out_vocab_file, "wb") as fi: content_spiece_model = self.sp_model.serialized_model_proto() fi.write(content_spiece_model) return (out_vocab_file,)
27182812/ChatGLM-LLaMA-chinese-insturct
1,727
src/transformers/models/van/__init__.py
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available _import_structure = {"configuration_van": ["VAN_PRETRAINED_CONFIG_ARCHIVE_MAP", "VanConfig"]} try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_van"] = [ "VAN_PRETRAINED_MODEL_ARCHIVE_LIST", "VanForImageClassification", "VanModel", "VanPreTrainedModel", ] if TYPE_CHECKING: from .configuration_van import VAN_PRETRAINED_CONFIG_ARCHIVE_MAP, VanConfig try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_van import ( VAN_PRETRAINED_MODEL_ARCHIVE_LIST, VanForImageClassification, VanModel, VanPreTrainedModel, ) else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure)
27182812/ChatGLM-LLaMA-chinese-insturct
10,375
src/transformers/models/van/convert_van_to_pytorch.py
# coding=utf-8 # Copyright 2022 BNRist (Tsinghua University), TKLNDST (Nankai University) and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Convert VAN checkpoints from the original repository. URL: https://github.com/Visual-Attention-Network/VAN-Classification""" import argparse import json import sys from dataclasses import dataclass, field from functools import partial from pathlib import Path from typing import List import torch import torch.nn as nn from huggingface_hub import cached_download, hf_hub_download from torch import Tensor from transformers import AutoFeatureExtractor, VanConfig, VanForImageClassification from transformers.models.van.modeling_van import VanLayerScaling from transformers.utils import logging logging.set_verbosity_info() logger = logging.get_logger(__name__) @dataclass class Tracker: module: nn.Module traced: List[nn.Module] = field(default_factory=list) handles: list = field(default_factory=list) def _forward_hook(self, m, inputs: Tensor, outputs: Tensor): has_not_submodules = len(list(m.modules())) == 1 or isinstance(m, nn.Conv2d) or isinstance(m, nn.BatchNorm2d) if has_not_submodules: if not isinstance(m, VanLayerScaling): self.traced.append(m) def __call__(self, x: Tensor): for m in self.module.modules(): self.handles.append(m.register_forward_hook(self._forward_hook)) self.module(x) [x.remove() for x in self.handles] return self @property def parametrized(self): # check the len of the 
state_dict keys to see if we have learnable params return list(filter(lambda x: len(list(x.state_dict().keys())) > 0, self.traced)) @dataclass class ModuleTransfer: src: nn.Module dest: nn.Module verbose: int = 0 src_skip: List = field(default_factory=list) dest_skip: List = field(default_factory=list) def __call__(self, x: Tensor): """ Transfer the weights of `self.src` to `self.dest` by performing a forward pass using `x` as input. Under the hood we tracked all the operations in both modules. """ dest_traced = Tracker(self.dest)(x).parametrized src_traced = Tracker(self.src)(x).parametrized src_traced = list(filter(lambda x: type(x) not in self.src_skip, src_traced)) dest_traced = list(filter(lambda x: type(x) not in self.dest_skip, dest_traced)) if len(dest_traced) != len(src_traced): raise Exception( f"Numbers of operations are different. Source module has {len(src_traced)} operations while" f" destination module has {len(dest_traced)}." ) for dest_m, src_m in zip(dest_traced, src_traced): dest_m.load_state_dict(src_m.state_dict()) if self.verbose == 1: print(f"Transfered from={src_m} to={dest_m}") def copy_parameters(from_model: nn.Module, our_model: nn.Module) -> nn.Module: # nn.Parameter cannot be tracked by the Tracker, thus we need to manually convert them from_state_dict = from_model.state_dict() our_state_dict = our_model.state_dict() config = our_model.config all_keys = [] for stage_idx in range(len(config.hidden_sizes)): for block_id in range(config.depths[stage_idx]): from_key = f"block{stage_idx + 1}.{block_id}.layer_scale_1" to_key = f"van.encoder.stages.{stage_idx}.layers.{block_id}.attention_scaling.weight" all_keys.append((from_key, to_key)) from_key = f"block{stage_idx + 1}.{block_id}.layer_scale_2" to_key = f"van.encoder.stages.{stage_idx}.layers.{block_id}.mlp_scaling.weight" all_keys.append((from_key, to_key)) for from_key, to_key in all_keys: our_state_dict[to_key] = from_state_dict.pop(from_key) our_model.load_state_dict(our_state_dict) 
return our_model def convert_weight_and_push( name: str, config: VanConfig, checkpoint: str, from_model: nn.Module, save_directory: Path, push_to_hub: bool = True, ): print(f"Downloading weights for {name}...") checkpoint_path = cached_download(checkpoint) print(f"Converting {name}...") from_state_dict = torch.load(checkpoint_path)["state_dict"] from_model.load_state_dict(from_state_dict) from_model.eval() with torch.no_grad(): our_model = VanForImageClassification(config).eval() module_transfer = ModuleTransfer(src=from_model, dest=our_model) x = torch.randn((1, 3, 224, 224)) module_transfer(x) our_model = copy_parameters(from_model, our_model) if not torch.allclose(from_model(x), our_model(x).logits): raise ValueError("The model logits don't match the original one.") checkpoint_name = name print(checkpoint_name) if push_to_hub: our_model.push_to_hub( repo_path_or_name=save_directory / checkpoint_name, commit_message="Add model", use_temp_dir=True, ) # we can use the convnext one feature_extractor = AutoFeatureExtractor.from_pretrained("facebook/convnext-base-224-22k-1k") feature_extractor.push_to_hub( repo_path_or_name=save_directory / checkpoint_name, commit_message="Add feature extractor", use_temp_dir=True, ) print(f"Pushed {checkpoint_name}") def convert_weights_and_push(save_directory: Path, model_name: str = None, push_to_hub: bool = True): filename = "imagenet-1k-id2label.json" num_labels = 1000 repo_id = "huggingface/label-files" num_labels = num_labels id2label = json.load(open(hf_hub_download(repo_id, filename, repo_type="dataset"), "r")) id2label = {int(k): v for k, v in id2label.items()} id2label = id2label label2id = {v: k for k, v in id2label.items()} ImageNetPreTrainedConfig = partial(VanConfig, num_labels=num_labels, id2label=id2label, label2id=label2id) names_to_config = { "van-tiny": ImageNetPreTrainedConfig( hidden_sizes=[32, 64, 160, 256], depths=[3, 3, 5, 2], mlp_ratios=[8, 8, 4, 4], ), "van-small": ImageNetPreTrainedConfig( hidden_sizes=[64, 
128, 320, 512], depths=[2, 2, 4, 2], mlp_ratios=[8, 8, 4, 4], ), "van-base": ImageNetPreTrainedConfig( hidden_sizes=[64, 128, 320, 512], depths=[3, 3, 12, 3], mlp_ratios=[8, 8, 4, 4], ), "van-large": ImageNetPreTrainedConfig( hidden_sizes=[64, 128, 320, 512], depths=[3, 5, 27, 3], mlp_ratios=[8, 8, 4, 4], ), } names_to_original_models = { "van-tiny": van_tiny, "van-small": van_small, "van-base": van_base, "van-large": van_large, } names_to_original_checkpoints = { "van-tiny": ( "https://huggingface.co/Visual-Attention-Network/VAN-Tiny-original/resolve/main/van_tiny_754.pth.tar" ), "van-small": ( "https://huggingface.co/Visual-Attention-Network/VAN-Small-original/resolve/main/van_small_811.pth.tar" ), "van-base": ( "https://huggingface.co/Visual-Attention-Network/VAN-Base-original/resolve/main/van_base_828.pth.tar" ), "van-large": ( "https://huggingface.co/Visual-Attention-Network/VAN-Large-original/resolve/main/van_large_839.pth.tar" ), } if model_name: convert_weight_and_push( model_name, names_to_config[model_name], checkpoint=names_to_original_checkpoints[model_name], from_model=names_to_original_models[model_name](), save_directory=save_directory, push_to_hub=push_to_hub, ) else: for model_name, config in names_to_config.items(): convert_weight_and_push( model_name, config, checkpoint=names_to_original_checkpoints[model_name], from_model=names_to_original_models[model_name](), save_directory=save_directory, push_to_hub=push_to_hub, ) if __name__ == "__main__": parser = argparse.ArgumentParser() # Required parameters parser.add_argument( "--model-name", default=None, type=str, help=( "The name of the model you wish to convert, it must be one of the supported resnet* architecture," " currently: van-tiny/small/base/large. If `None`, all of them will the converted." 
), ) parser.add_argument( "--pytorch_dump_folder_path", default=None, type=Path, required=True, help="Path to the output PyTorch model directory.", ) parser.add_argument( "--van_dir", required=True, type=Path, help=( "A path to VAN's original implementation directory. You can download from here:" " https://github.com/Visual-Attention-Network/VAN-Classification" ), ) parser.add_argument( "--push_to_hub", default=True, type=bool, required=False, help="If True, push model and feature extractor to the hub.", ) args = parser.parse_args() pytorch_dump_folder_path: Path = args.pytorch_dump_folder_path pytorch_dump_folder_path.mkdir(exist_ok=True, parents=True) van_dir = args.van_dir # append the path to the parents to maskformer dir sys.path.append(str(van_dir.parent)) from van.models.van import van_base, van_large, van_small, van_tiny convert_weights_and_push(pytorch_dump_folder_path, args.model_name, args.push_to_hub)
27182812/ChatGLM-LLaMA-chinese-insturct
4,835
src/transformers/models/van/configuration_van.py
# coding=utf-8 # Copyright 2022 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ VAN model configuration""" from ...configuration_utils import PretrainedConfig from ...utils import logging logger = logging.get_logger(__name__) VAN_PRETRAINED_CONFIG_ARCHIVE_MAP = { "Visual-Attention-Network/van-base": ( "https://huggingface.co/Visual-Attention-Network/van-base/blob/main/config.json" ), } class VanConfig(PretrainedConfig): r""" This is the configuration class to store the configuration of a [`VanModel`]. It is used to instantiate a VAN model according to the specified arguments, defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of the VAN [Visual-Attention-Network/van-base](https://huggingface.co/Visual-Attention-Network/van-base) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information. Args: image_size (`int`, *optional*, defaults to 224): The size (resolution) of each image. num_channels (`int`, *optional*, defaults to 3): The number of input channels. patch_sizes (`List[int]`, *optional*, defaults to `[7, 3, 3, 3]`): Patch size to use in each stage's embedding layer. strides (`List[int]`, *optional*, defaults to `[4, 2, 2, 2]`): Stride size to use in each stage's embedding layer to downsample the input. 
hidden_sizes (`List[int]`, *optional*, defaults to `[64, 128, 320, 512]`): Dimensionality (hidden size) at each stage. depths (`List[int]`, *optional*, defaults to `[3, 3, 12, 3]`): Depth (number of layers) for each stage. mlp_ratios (`List[int]`, *optional*, defaults to `[8, 8, 4, 4]`): The expansion ratio for mlp layer at each stage. hidden_act (`str` or `function`, *optional*, defaults to `"gelu"`): The non-linear activation function (function or string) in each layer. If string, `"gelu"`, `"relu"`, `"selu"` and `"gelu_new"` are supported. initializer_range (`float`, *optional*, defaults to 0.02): The standard deviation of the truncated_normal_initializer for initializing all weight matrices. layer_norm_eps (`float`, *optional*, defaults to 1e-12): The epsilon used by the layer normalization layers. layer_scale_init_value (`float`, *optional*, defaults to 1e-2): The initial value for layer scaling. drop_path_rate (`float`, *optional*, defaults to 0.0): The dropout probability for stochastic depth. dropout_rate (`float`, *optional*, defaults to 0.0): The dropout probability for dropout. 
Example: ```python >>> from transformers import VanModel, VanConfig >>> # Initializing a VAN van-base style configuration >>> configuration = VanConfig() >>> # Initializing a model from the van-base style configuration >>> model = VanModel(configuration) >>> # Accessing the model configuration >>> configuration = model.config ```""" model_type = "van" def __init__( self, image_size=224, num_channels=3, patch_sizes=[7, 3, 3, 3], strides=[4, 2, 2, 2], hidden_sizes=[64, 128, 320, 512], depths=[3, 3, 12, 3], mlp_ratios=[8, 8, 4, 4], hidden_act="gelu", initializer_range=0.02, layer_norm_eps=1e-6, layer_scale_init_value=1e-2, drop_path_rate=0.0, dropout_rate=0.0, **kwargs, ): super().__init__(**kwargs) self.image_size = image_size self.num_channels = num_channels self.patch_sizes = patch_sizes self.strides = strides self.hidden_sizes = hidden_sizes self.depths = depths self.mlp_ratios = mlp_ratios self.hidden_act = hidden_act self.initializer_range = initializer_range self.layer_norm_eps = layer_norm_eps self.layer_scale_init_value = layer_scale_init_value self.drop_path_rate = drop_path_rate self.dropout_rate = dropout_rate
27182812/ChatGLM-LLaMA-chinese-insturct
21,572
src/transformers/models/van/modeling_van.py
# coding=utf-8 # Copyright 2022 BNRist (Tsinghua University), TKLNDST (Nankai University) and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch Visual Attention Network (VAN) model.""" import math from collections import OrderedDict from typing import Optional, Tuple, Union import torch import torch.utils.checkpoint from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACT2FN from ...modeling_outputs import ( BaseModelOutputWithNoAttention, BaseModelOutputWithPoolingAndNoAttention, ImageClassifierOutputWithNoAttention, ) from ...modeling_utils import PreTrainedModel from ...utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging from .configuration_van import VanConfig logger = logging.get_logger(__name__) # General docstring _CONFIG_FOR_DOC = "VanConfig" # Base docstring _CHECKPOINT_FOR_DOC = "Visual-Attention-Network/van-base" _EXPECTED_OUTPUT_SHAPE = [1, 512, 7, 7] # Image classification docstring _IMAGE_CLASS_CHECKPOINT = "Visual-Attention-Network/van-base" _IMAGE_CLASS_EXPECTED_OUTPUT = "tabby, tabby cat" VAN_PRETRAINED_MODEL_ARCHIVE_LIST = [ "Visual-Attention-Network/van-base", # See all VAN models at https://huggingface.co/models?filter=van ] # Copied from transformers.models.convnext.modeling_convnext.drop_path def drop_path(input, drop_prob: float = 0.0, training: bool = False): """ Drop paths 
(Stochastic Depth) per sample (when applied in main path of residual blocks). Comment by Ross Wightman: This is the same as the DropConnect impl I created for EfficientNet, etc networks, however, the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper... See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use 'survival rate' as the argument. """ if drop_prob == 0.0 or not training: return input keep_prob = 1 - drop_prob shape = (input.shape[0],) + (1,) * (input.ndim - 1) # work with diff dim tensors, not just 2D ConvNets random_tensor = keep_prob + torch.rand(shape, dtype=input.dtype, device=input.device) random_tensor.floor_() # binarize output = input.div(keep_prob) * random_tensor return output # Copied from transformers.models.convnext.modeling_convnext.ConvNextDropPath with ConvNext->Van class VanDropPath(nn.Module): """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).""" def __init__(self, drop_prob: Optional[float] = None) -> None: super().__init__() self.drop_prob = drop_prob def forward(self, hidden_states: torch.Tensor) -> torch.Tensor: return drop_path(hidden_states, self.drop_prob, self.training) def extra_repr(self) -> str: return "p={}".format(self.drop_prob) class VanOverlappingPatchEmbedder(nn.Module): """ Downsamples the input using a patchify operation with a `stride` of 4 by default making adjacent windows overlap by half of the area. From [PVTv2: Improved Baselines with Pyramid Vision Transformer](https://arxiv.org/abs/2106.13797). 
""" def __init__(self, in_channels: int, hidden_size: int, patch_size: int = 7, stride: int = 4): super().__init__() self.convolution = nn.Conv2d( in_channels, hidden_size, kernel_size=patch_size, stride=stride, padding=patch_size // 2 ) self.normalization = nn.BatchNorm2d(hidden_size) def forward(self, input: torch.Tensor) -> torch.Tensor: hidden_state = self.convolution(input) hidden_state = self.normalization(hidden_state) return hidden_state class VanMlpLayer(nn.Module): """ MLP with depth-wise convolution, from [PVTv2: Improved Baselines with Pyramid Vision Transformer](https://arxiv.org/abs/2106.13797). """ def __init__( self, in_channels: int, hidden_size: int, out_channels: int, hidden_act: str = "gelu", dropout_rate: float = 0.5, ): super().__init__() self.in_dense = nn.Conv2d(in_channels, hidden_size, kernel_size=1) self.depth_wise = nn.Conv2d(hidden_size, hidden_size, kernel_size=3, padding=1, groups=hidden_size) self.activation = ACT2FN[hidden_act] self.dropout1 = nn.Dropout(dropout_rate) self.out_dense = nn.Conv2d(hidden_size, out_channels, kernel_size=1) self.dropout2 = nn.Dropout(dropout_rate) def forward(self, hidden_state: torch.Tensor) -> torch.Tensor: hidden_state = self.in_dense(hidden_state) hidden_state = self.depth_wise(hidden_state) hidden_state = self.activation(hidden_state) hidden_state = self.dropout1(hidden_state) hidden_state = self.out_dense(hidden_state) hidden_state = self.dropout2(hidden_state) return hidden_state class VanLargeKernelAttention(nn.Module): """ Basic Large Kernel Attention (LKA). 
""" def __init__(self, hidden_size: int): super().__init__() self.depth_wise = nn.Conv2d(hidden_size, hidden_size, kernel_size=5, padding=2, groups=hidden_size) self.depth_wise_dilated = nn.Conv2d( hidden_size, hidden_size, kernel_size=7, dilation=3, padding=9, groups=hidden_size ) self.point_wise = nn.Conv2d(hidden_size, hidden_size, kernel_size=1) def forward(self, hidden_state: torch.Tensor) -> torch.Tensor: hidden_state = self.depth_wise(hidden_state) hidden_state = self.depth_wise_dilated(hidden_state) hidden_state = self.point_wise(hidden_state) return hidden_state class VanLargeKernelAttentionLayer(nn.Module): """ Computes attention using Large Kernel Attention (LKA) and attends the input. """ def __init__(self, hidden_size: int): super().__init__() self.attention = VanLargeKernelAttention(hidden_size) def forward(self, hidden_state: torch.Tensor) -> torch.Tensor: attention = self.attention(hidden_state) attended = hidden_state * attention return attended class VanSpatialAttentionLayer(nn.Module): """ Van spatial attention layer composed by projection (via conv) -> act -> Large Kernel Attention (LKA) attention -> projection (via conv) + residual connection. """ def __init__(self, hidden_size: int, hidden_act: str = "gelu"): super().__init__() self.pre_projection = nn.Sequential( OrderedDict( [ ("conv", nn.Conv2d(hidden_size, hidden_size, kernel_size=1)), ("act", ACT2FN[hidden_act]), ] ) ) self.attention_layer = VanLargeKernelAttentionLayer(hidden_size) self.post_projection = nn.Conv2d(hidden_size, hidden_size, kernel_size=1) def forward(self, hidden_state: torch.Tensor) -> torch.Tensor: residual = hidden_state hidden_state = self.pre_projection(hidden_state) hidden_state = self.attention_layer(hidden_state) hidden_state = self.post_projection(hidden_state) hidden_state = hidden_state + residual return hidden_state class VanLayerScaling(nn.Module): """ Scales the inputs by a learnable parameter initialized by `initial_value`. 
""" def __init__(self, hidden_size: int, initial_value: float = 1e-2): super().__init__() self.weight = nn.Parameter(initial_value * torch.ones((hidden_size)), requires_grad=True) def forward(self, hidden_state: torch.Tensor) -> torch.Tensor: # unsqueezing for broadcasting hidden_state = self.weight.unsqueeze(-1).unsqueeze(-1) * hidden_state return hidden_state class VanLayer(nn.Module): """ Van layer composed by normalization layers, large kernel attention (LKA) and a multi layer perceptron (MLP). """ def __init__( self, config: VanConfig, hidden_size: int, mlp_ratio: int = 4, drop_path_rate: float = 0.5, ): super().__init__() self.drop_path = VanDropPath(drop_path_rate) if drop_path_rate > 0.0 else nn.Identity() self.pre_normomalization = nn.BatchNorm2d(hidden_size) self.attention = VanSpatialAttentionLayer(hidden_size, config.hidden_act) self.attention_scaling = VanLayerScaling(hidden_size, config.layer_scale_init_value) self.post_normalization = nn.BatchNorm2d(hidden_size) self.mlp = VanMlpLayer( hidden_size, hidden_size * mlp_ratio, hidden_size, config.hidden_act, config.dropout_rate ) self.mlp_scaling = VanLayerScaling(hidden_size, config.layer_scale_init_value) def forward(self, hidden_state: torch.Tensor) -> torch.Tensor: residual = hidden_state # attention hidden_state = self.pre_normomalization(hidden_state) hidden_state = self.attention(hidden_state) hidden_state = self.attention_scaling(hidden_state) hidden_state = self.drop_path(hidden_state) # residual connection hidden_state = residual + hidden_state residual = hidden_state # mlp hidden_state = self.post_normalization(hidden_state) hidden_state = self.mlp(hidden_state) hidden_state = self.mlp_scaling(hidden_state) hidden_state = self.drop_path(hidden_state) # residual connection hidden_state = residual + hidden_state return hidden_state class VanStage(nn.Module): """ VanStage, consisting of multiple layers. 
""" def __init__( self, config: VanConfig, in_channels: int, hidden_size: int, patch_size: int, stride: int, depth: int, mlp_ratio: int = 4, drop_path_rate: float = 0.0, ): super().__init__() self.embeddings = VanOverlappingPatchEmbedder(in_channels, hidden_size, patch_size, stride) self.layers = nn.Sequential( *[ VanLayer( config, hidden_size, mlp_ratio=mlp_ratio, drop_path_rate=drop_path_rate, ) for _ in range(depth) ] ) self.normalization = nn.LayerNorm(hidden_size, eps=config.layer_norm_eps) def forward(self, hidden_state: torch.Tensor) -> torch.Tensor: hidden_state = self.embeddings(hidden_state) hidden_state = self.layers(hidden_state) # rearrange b c h w -> b (h w) c batch_size, hidden_size, height, width = hidden_state.shape hidden_state = hidden_state.flatten(2).transpose(1, 2) hidden_state = self.normalization(hidden_state) # rearrange b (h w) c- > b c h w hidden_state = hidden_state.view(batch_size, height, width, hidden_size).permute(0, 3, 1, 2) return hidden_state class VanEncoder(nn.Module): """ VanEncoder, consisting of multiple stages. 
""" def __init__(self, config: VanConfig): super().__init__() self.stages = nn.ModuleList([]) patch_sizes = config.patch_sizes strides = config.strides hidden_sizes = config.hidden_sizes depths = config.depths mlp_ratios = config.mlp_ratios drop_path_rates = [x.item() for x in torch.linspace(0, config.drop_path_rate, sum(config.depths))] for num_stage, (patch_size, stride, hidden_size, depth, mlp_expantion, drop_path_rate) in enumerate( zip(patch_sizes, strides, hidden_sizes, depths, mlp_ratios, drop_path_rates) ): is_first_stage = num_stage == 0 in_channels = hidden_sizes[num_stage - 1] if is_first_stage: in_channels = config.num_channels self.stages.append( VanStage( config, in_channels, hidden_size, patch_size=patch_size, stride=stride, depth=depth, mlp_ratio=mlp_expantion, drop_path_rate=drop_path_rate, ) ) def forward( self, hidden_state: torch.Tensor, output_hidden_states: Optional[bool] = False, return_dict: Optional[bool] = True, ) -> Union[Tuple, BaseModelOutputWithNoAttention]: all_hidden_states = () if output_hidden_states else None for _, stage_module in enumerate(self.stages): hidden_state = stage_module(hidden_state) if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_state,) if not return_dict: return tuple(v for v in [hidden_state, all_hidden_states] if v is not None) return BaseModelOutputWithNoAttention(last_hidden_state=hidden_state, hidden_states=all_hidden_states) class VanPreTrainedModel(PreTrainedModel): """ An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained models. 
""" config_class = VanConfig base_model_prefix = "van" main_input_name = "pixel_values" supports_gradient_checkpointing = True def _init_weights(self, module): """Initialize the weights""" if isinstance(module, nn.Linear): nn.init.trunc_normal_(module.weight, std=self.config.initializer_range) if isinstance(module, nn.Linear) and module.bias is not None: nn.init.constant_(module.bias, 0) elif isinstance(module, nn.LayerNorm): nn.init.constant_(module.bias, 0) nn.init.constant_(module.weight, 1.0) elif isinstance(module, nn.Conv2d): fan_out = module.kernel_size[0] * module.kernel_size[1] * module.out_channels fan_out //= module.groups module.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) if module.bias is not None: module.bias.data.zero_() def _set_gradient_checkpointing(self, module, value=False): if isinstance(module, VanModel): module.gradient_checkpointing = value VAN_START_DOCSTRING = r""" This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`VanConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ VAN_INPUTS_DOCSTRING = r""" Args: pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`): Pixel values. Pixel values can be obtained using [`AutoImageProcessor`]. See [`ConvNextImageProcessor.__call__`] for details. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all stages. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. 
""" @add_start_docstrings( "The bare VAN model outputting raw features without any specific head on top. Note, VAN does not have an embedding" " layer.", VAN_START_DOCSTRING, ) class VanModel(VanPreTrainedModel): def __init__(self, config): super().__init__(config) self.config = config self.encoder = VanEncoder(config) # final layernorm layer self.layernorm = nn.LayerNorm(config.hidden_sizes[-1], eps=config.layer_norm_eps) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(VAN_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=BaseModelOutputWithPoolingAndNoAttention, config_class=_CONFIG_FOR_DOC, modality="vision", expected_output=_EXPECTED_OUTPUT_SHAPE, ) def forward( self, pixel_values: Optional[torch.FloatTensor], output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, BaseModelOutputWithPoolingAndNoAttention]: output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict encoder_outputs = self.encoder( pixel_values, output_hidden_states=output_hidden_states, return_dict=return_dict, ) last_hidden_state = encoder_outputs[0] # global average pooling, n c w h -> n c pooled_output = last_hidden_state.mean(dim=[-2, -1]) if not return_dict: return (last_hidden_state, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPoolingAndNoAttention( last_hidden_state=last_hidden_state, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, ) @add_start_docstrings( """ VAN Model with an image classification head on top (a linear layer on top of the pooled features), e.g. for ImageNet. 
""", VAN_START_DOCSTRING, ) class VanForImageClassification(VanPreTrainedModel): def __init__(self, config): super().__init__(config) self.van = VanModel(config) # Classifier head self.classifier = ( nn.Linear(config.hidden_sizes[-1], config.num_labels) if config.num_labels > 0 else nn.Identity() ) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(VAN_INPUTS_DOCSTRING) @add_code_sample_docstrings( checkpoint=_IMAGE_CLASS_CHECKPOINT, output_type=ImageClassifierOutputWithNoAttention, config_class=_CONFIG_FOR_DOC, expected_output=_IMAGE_CLASS_EXPECTED_OUTPUT, ) def forward( self, pixel_values: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, ImageClassifierOutputWithNoAttention]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the image classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.van(pixel_values, output_hidden_states=output_hidden_states, return_dict=return_dict) pooled_output = outputs.pooler_output if return_dict else outputs[1] logits = self.classifier(pooled_output) loss = None if labels is not None: if self.config.problem_type is None: if self.config.num_labels == 1: self.config.problem_type = "regression" elif self.config.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int): self.config.problem_type = "single_label_classification" else: self.config.problem_type = "multi_label_classification" if self.config.problem_type == "regression": loss_fct = MSELoss() if self.config.num_labels == 1: loss = loss_fct(logits.squeeze(), labels.squeeze()) else: loss = loss_fct(logits, labels) elif self.config.problem_type == "single_label_classification": loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.config.num_labels), labels.view(-1)) elif self.config.problem_type == "multi_label_classification": loss_fct = BCEWithLogitsLoss() loss = loss_fct(logits, labels) if not return_dict: output = (logits,) + outputs[2:] return ((loss,) + output) if loss is not None else output return ImageClassifierOutputWithNoAttention(loss=loss, logits=logits, hidden_states=outputs.hidden_states)
27182812/ChatGLM-LLaMA-chinese-insturct
18,479
src/transformers/models/blenderbot_small/configuration_blenderbot_small.py
# coding=utf-8 # Copyright 2021 The Facebook, Inc. and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ BlenderbotSmall model configuration""" from collections import OrderedDict from typing import Any, Mapping, Optional from ... import PreTrainedTokenizer from ...configuration_utils import PretrainedConfig from ...file_utils import TensorType, is_torch_available from ...onnx import OnnxConfig, OnnxConfigWithPast, OnnxSeq2SeqConfigWithPast from ...onnx.utils import compute_effective_axis_dimension from ...utils import logging logger = logging.get_logger(__name__) BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP = { "facebook/blenderbot_small-90M": "https://huggingface.co/facebook/blenderbot_small-90M/resolve/main/config.json", # See all BlenderbotSmall models at https://huggingface.co/models?filter=blenderbot_small } class BlenderbotSmallConfig(PretrainedConfig): r""" This is the configuration class to store the configuration of a [`BlenderbotSmallModel`]. It is used to instantiate an BlenderbotSmall model according to the specified arguments, defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of the BlenderbotSmall [facebook/blenderbot_small-90M](https://huggingface.co/facebook/blenderbot_small-90M) architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. 
Read the documentation from [`PretrainedConfig`] for more information. Args: vocab_size (`int`, *optional*, defaults to 50265): Vocabulary size of the BlenderbotSmall model. Defines the number of different tokens that can be represented by the `inputs_ids` passed when calling [`BlenderbotSmallModel`] or [`TFBlenderbotSmallModel`]. d_model (`int`, *optional*, defaults to 512): Dimensionality of the layers and the pooler layer. encoder_layers (`int`, *optional*, defaults to 8): Number of encoder layers. decoder_layers (`int`, *optional*, defaults to 8): Number of decoder layers. encoder_attention_heads (`int`, *optional*, defaults to 16): Number of attention heads for each attention layer in the Transformer encoder. decoder_attention_heads (`int`, *optional*, defaults to 16): Number of attention heads for each attention layer in the Transformer decoder. decoder_ffn_dim (`int`, *optional*, defaults to 2048): Dimensionality of the "intermediate" (often named feed-forward) layer in decoder. encoder_ffn_dim (`int`, *optional*, defaults to 2048): Dimensionality of the "intermediate" (often named feed-forward) layer in decoder. activation_function (`str` or `function`, *optional*, defaults to `"gelu"`): The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`, `"relu"`, `"silu"` and `"gelu_new"` are supported. dropout (`float`, *optional*, defaults to 0.1): The dropout probability for all fully connected layers in the embeddings, encoder, and pooler. attention_dropout (`float`, *optional*, defaults to 0.0): The dropout ratio for the attention probabilities. activation_dropout (`float`, *optional*, defaults to 0.0): The dropout ratio for activations inside the fully connected layer. max_position_embeddings (`int`, *optional*, defaults to 512): The maximum sequence length that this model might ever be used with. Typically set this to something large just in case (e.g., 512 or 1024 or 2048). 
init_std (`float`, *optional*, defaults to 0.02): The standard deviation of the truncated_normal_initializer for initializing all weight matrices. encoder_layerdrop (`float`, *optional*, defaults to 0.0): The LayerDrop probability for the encoder. See the [LayerDrop paper](see https://arxiv.org/abs/1909.11556) for more details. decoder_layerdrop (`float`, *optional*, defaults to 0.0): The LayerDrop probability for the decoder. See the [LayerDrop paper](see https://arxiv.org/abs/1909.11556) for more details. scale_embedding (`bool`, *optional*, defaults to `False`): Scale embeddings by diving by sqrt(d_model). use_cache (`bool`, *optional*, defaults to `True`): Whether or not the model should return the last key/values attentions (not used by all models) forced_eos_token_id (`int`, *optional*, defaults to 2): The id of the token to force as the last generated token when `max_length` is reached. Usually set to `eos_token_id`. Example: ```python >>> from transformers import BlenderbotSmallConfig, BlenderbotSmallModel >>> # Initializing a BlenderbotSmall facebook/blenderbot_small-90M style configuration >>> configuration = BlenderbotSmallConfig() >>> # Initializing a model (with random weights) from the facebook/blenderbot_small-90M style configuration >>> model = BlenderbotSmallModel(configuration) >>> # Accessing the model configuration >>> configuration = model.config ```""" model_type = "blenderbot-small" keys_to_ignore_at_inference = ["past_key_values"] attribute_map = {"num_attention_heads": "encoder_attention_heads", "hidden_size": "d_model"} def __init__( self, vocab_size=50265, max_position_embeddings=512, encoder_layers=8, encoder_ffn_dim=2048, encoder_attention_heads=16, decoder_layers=8, decoder_ffn_dim=2048, decoder_attention_heads=16, encoder_layerdrop=0.0, decoder_layerdrop=0.0, use_cache=True, is_encoder_decoder=True, activation_function="gelu", d_model=512, dropout=0.1, attention_dropout=0.0, activation_dropout=0.0, init_std=0.02, 
decoder_start_token_id=1, scale_embedding=False, pad_token_id=0, bos_token_id=1, eos_token_id=2, forced_eos_token_id=2, **kwargs, ): self.vocab_size = vocab_size self.max_position_embeddings = max_position_embeddings self.d_model = d_model self.encoder_ffn_dim = encoder_ffn_dim self.encoder_layers = encoder_layers self.encoder_attention_heads = encoder_attention_heads self.decoder_ffn_dim = decoder_ffn_dim self.decoder_layers = decoder_layers self.decoder_attention_heads = decoder_attention_heads self.dropout = dropout self.attention_dropout = attention_dropout self.activation_dropout = activation_dropout self.activation_function = activation_function self.init_std = init_std self.encoder_layerdrop = encoder_layerdrop self.decoder_layerdrop = decoder_layerdrop self.use_cache = use_cache self.num_hidden_layers = encoder_layers self.scale_embedding = scale_embedding # scale factor will be sqrt(d_model) if True super().__init__( pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id, is_encoder_decoder=is_encoder_decoder, decoder_start_token_id=decoder_start_token_id, forced_eos_token_id=forced_eos_token_id, **kwargs, ) # Copied from transformers.models.bart.configuration_bart.BartOnnxConfig class BlenderbotSmallOnnxConfig(OnnxSeq2SeqConfigWithPast): @property def inputs(self) -> Mapping[str, Mapping[int, str]]: if self.task in ["default", "seq2seq-lm"]: common_inputs = OrderedDict( [ ("input_ids", {0: "batch", 1: "encoder_sequence"}), ("attention_mask", {0: "batch", 1: "encoder_sequence"}), ] ) if self.use_past: common_inputs["decoder_input_ids"] = {0: "batch"} common_inputs["decoder_attention_mask"] = {0: "batch", 1: "past_decoder_sequence + sequence"} else: common_inputs["decoder_input_ids"] = {0: "batch", 1: "decoder_sequence"} common_inputs["decoder_attention_mask"] = {0: "batch", 1: "decoder_sequence"} if self.use_past: self.fill_with_past_key_values_(common_inputs, direction="inputs") elif self.task == "causal-lm": # TODO: figure this 
case out. common_inputs = OrderedDict( [ ("input_ids", {0: "batch", 1: "encoder_sequence"}), ("attention_mask", {0: "batch", 1: "encoder_sequence"}), ] ) if self.use_past: num_encoder_layers, _ = self.num_layers for i in range(num_encoder_layers): common_inputs[f"past_key_values.{i}.key"] = {0: "batch", 2: "past_sequence + sequence"} common_inputs[f"past_key_values.{i}.value"] = {0: "batch", 2: "past_sequence + sequence"} else: common_inputs = OrderedDict( [ ("input_ids", {0: "batch", 1: "encoder_sequence"}), ("attention_mask", {0: "batch", 1: "encoder_sequence"}), ("decoder_input_ids", {0: "batch", 1: "decoder_sequence"}), ("decoder_attention_mask", {0: "batch", 1: "decoder_sequence"}), ] ) return common_inputs @property def outputs(self) -> Mapping[str, Mapping[int, str]]: if self.task in ["default", "seq2seq-lm"]: common_outputs = super().outputs else: common_outputs = super(OnnxConfigWithPast, self).outputs if self.use_past: num_encoder_layers, _ = self.num_layers for i in range(num_encoder_layers): common_outputs[f"present.{i}.key"] = {0: "batch", 2: "past_sequence + sequence"} common_outputs[f"present.{i}.value"] = {0: "batch", 2: "past_sequence + sequence"} return common_outputs def _generate_dummy_inputs_for_default_and_seq2seq_lm( self, tokenizer: PreTrainedTokenizer, batch_size: int = -1, seq_length: int = -1, is_pair: bool = False, framework: Optional[TensorType] = None, ) -> Mapping[str, Any]: encoder_inputs = self._generate_dummy_inputs_for_sequence_classification_and_question_answering( tokenizer, batch_size, seq_length, is_pair, framework ) # Generate decoder inputs decoder_seq_length = seq_length if not self.use_past else 1 decoder_inputs = self._generate_dummy_inputs_for_sequence_classification_and_question_answering( tokenizer, batch_size, decoder_seq_length, is_pair, framework ) decoder_inputs = {f"decoder_{name}": tensor for name, tensor in decoder_inputs.items()} common_inputs = dict(**encoder_inputs, **decoder_inputs) if self.use_past: if not 
is_torch_available(): raise ValueError("Cannot generate dummy past_keys inputs without PyTorch installed.") else: import torch batch, encoder_seq_length = common_inputs["input_ids"].shape decoder_seq_length = common_inputs["decoder_input_ids"].shape[1] num_encoder_attention_heads, num_decoder_attention_heads = self.num_attention_heads encoder_shape = ( batch, num_encoder_attention_heads, encoder_seq_length, self._config.hidden_size // num_encoder_attention_heads, ) decoder_past_length = decoder_seq_length + 3 decoder_shape = ( batch, num_decoder_attention_heads, decoder_past_length, self._config.hidden_size // num_decoder_attention_heads, ) common_inputs["decoder_attention_mask"] = torch.cat( [common_inputs["decoder_attention_mask"], torch.ones(batch, decoder_past_length)], dim=1 ) common_inputs["past_key_values"] = [] # If the number of encoder and decoder layers are present in the model configuration, both are considered num_encoder_layers, num_decoder_layers = self.num_layers min_num_layers = min(num_encoder_layers, num_decoder_layers) max_num_layers = max(num_encoder_layers, num_decoder_layers) - min_num_layers remaining_side_name = "encoder" if num_encoder_layers > num_decoder_layers else "decoder" for _ in range(min_num_layers): common_inputs["past_key_values"].append( ( torch.zeros(decoder_shape), torch.zeros(decoder_shape), torch.zeros(encoder_shape), torch.zeros(encoder_shape), ) ) # TODO: test this. 
shape = encoder_shape if remaining_side_name == "encoder" else decoder_shape for _ in range(min_num_layers, max_num_layers): common_inputs["past_key_values"].append((torch.zeros(shape), torch.zeros(shape))) return common_inputs def _generate_dummy_inputs_for_causal_lm( self, tokenizer: PreTrainedTokenizer, batch_size: int = -1, seq_length: int = -1, is_pair: bool = False, framework: Optional[TensorType] = None, ) -> Mapping[str, Any]: common_inputs = self._generate_dummy_inputs_for_sequence_classification_and_question_answering( tokenizer, batch_size, seq_length, is_pair, framework ) if self.use_past: if not is_torch_available(): raise ValueError("Cannot generate dummy past_keys inputs without PyTorch installed.") else: import torch batch, seqlen = common_inputs["input_ids"].shape # Not using the same length for past_key_values past_key_values_length = seqlen + 2 num_encoder_layers, _ = self.num_layers num_encoder_attention_heads, _ = self.num_attention_heads past_shape = ( batch, num_encoder_attention_heads, past_key_values_length, self._config.hidden_size // num_encoder_attention_heads, ) mask_dtype = common_inputs["attention_mask"].dtype common_inputs["attention_mask"] = torch.cat( [common_inputs["attention_mask"], torch.ones(batch, past_key_values_length, dtype=mask_dtype)], dim=1 ) common_inputs["past_key_values"] = [ (torch.zeros(past_shape), torch.zeros(past_shape)) for _ in range(num_encoder_layers) ] return common_inputs def _generate_dummy_inputs_for_sequence_classification_and_question_answering( self, tokenizer: PreTrainedTokenizer, batch_size: int = -1, seq_length: int = -1, is_pair: bool = False, framework: Optional[TensorType] = None, ) -> Mapping[str, Any]: # Copied from OnnxConfig.generate_dummy_inputs # Did not use super(OnnxConfigWithPast, self).generate_dummy_inputs for code clarity. 
# If dynamic axis (-1) we forward with a fixed dimension of 2 samples to avoid optimizations made by ONNX batch_size = compute_effective_axis_dimension( batch_size, fixed_dimension=OnnxConfig.default_fixed_batch, num_token_to_add=0 ) # If dynamic axis (-1) we forward with a fixed dimension of 8 tokens to avoid optimizations made by ONNX token_to_add = tokenizer.num_special_tokens_to_add(is_pair) seq_length = compute_effective_axis_dimension( seq_length, fixed_dimension=OnnxConfig.default_fixed_sequence, num_token_to_add=token_to_add ) # Generate dummy inputs according to compute batch and sequence dummy_input = [" ".join([tokenizer.unk_token]) * seq_length] * batch_size common_inputs = dict(tokenizer(dummy_input, return_tensors=framework)) return common_inputs def generate_dummy_inputs( self, tokenizer: PreTrainedTokenizer, batch_size: int = -1, seq_length: int = -1, is_pair: bool = False, framework: Optional[TensorType] = None, ) -> Mapping[str, Any]: if self.task in ["default", "seq2seq-lm"]: common_inputs = self._generate_dummy_inputs_for_default_and_seq2seq_lm( tokenizer, batch_size=batch_size, seq_length=seq_length, is_pair=is_pair, framework=framework ) elif self.task == "causal-lm": common_inputs = self._generate_dummy_inputs_for_causal_lm( tokenizer, batch_size=batch_size, seq_length=seq_length, is_pair=is_pair, framework=framework ) else: common_inputs = self._generate_dummy_inputs_for_sequence_classification_and_question_answering( tokenizer, batch_size=batch_size, seq_length=seq_length, is_pair=is_pair, framework=framework ) return common_inputs def _flatten_past_key_values_(self, flattened_output, name, idx, t): if self.task in ["default", "seq2seq-lm"]: flattened_output = super()._flatten_past_key_values_(flattened_output, name, idx, t) else: flattened_output = super(OnnxSeq2SeqConfigWithPast, self)._flatten_past_key_values_( flattened_output, name, idx, t )
27182812/ChatGLM-LLaMA-chinese-insturct
4,263
src/transformers/models/blenderbot_small/__init__.py
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Package init for the BlenderbotSmall model family. Uses the transformers
# lazy-import mechanism: at runtime the module is replaced by a _LazyModule
# built from `_import_structure`, so heavy backends (torch / tf / flax) are
# only imported when one of their symbols is actually accessed. Static type
# checkers instead follow the eager imports in the TYPE_CHECKING branch,
# which must mirror `_import_structure` exactly.
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_flax_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)

# Backend-independent symbols: always exposed regardless of which deep
# learning frameworks are installed.
_import_structure = {
    "configuration_blenderbot_small": [
        "BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "BlenderbotSmallConfig",
        "BlenderbotSmallOnnxConfig",
    ],
    "tokenization_blenderbot_small": ["BlenderbotSmallTokenizer"],
}

# Each optional backend follows the same pattern: probe availability, and on
# OptionalDependencyNotAvailable simply omit the symbols (`pass`) — a missing
# backend is an expected configuration, not an error.

# Fast (Rust) tokenizer — requires the `tokenizers` package.
try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_blenderbot_small_fast"] = ["BlenderbotSmallTokenizerFast"]

# PyTorch models.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_blenderbot_small"] = [
        "BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST",
        "BlenderbotSmallForCausalLM",
        "BlenderbotSmallForConditionalGeneration",
        "BlenderbotSmallModel",
        "BlenderbotSmallPreTrainedModel",
    ]

# TensorFlow models.
try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_tf_blenderbot_small"] = [
        "TFBlenderbotSmallForConditionalGeneration",
        "TFBlenderbotSmallModel",
        "TFBlenderbotSmallPreTrainedModel",
    ]

# Flax/JAX models.
try:
    if not is_flax_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_flax_blenderbot_small"] = [
        "FlaxBlenderbotSmallForConditionalGeneration",
        "FlaxBlenderbotSmallModel",
        "FlaxBlenderbotSmallPreTrainedModel",
    ]


if TYPE_CHECKING:
    # Eager imports for static analysis only; this branch never executes at
    # runtime and must stay in sync with `_import_structure` above.
    from .configuration_blenderbot_small import (
        BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP,
        BlenderbotSmallConfig,
        BlenderbotSmallOnnxConfig,
    )
    from .tokenization_blenderbot_small import BlenderbotSmallTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_blenderbot_small_fast import BlenderbotSmallTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_blenderbot_small import (
            BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST,
            BlenderbotSmallForCausalLM,
            BlenderbotSmallForConditionalGeneration,
            BlenderbotSmallModel,
            BlenderbotSmallPreTrainedModel,
        )

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_blenderbot_small import (
            TFBlenderbotSmallForConditionalGeneration,
            TFBlenderbotSmallModel,
            TFBlenderbotSmallPreTrainedModel,
        )

    try:
        if not is_flax_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_flax_blenderbot_small import (
            FlaxBlenderbotSmallForConditionalGeneration,
            FlaxBlenderbotSmallModel,
            FlaxBlenderbotSmallPreTrainedModel,
        )

else:
    import sys

    # Replace this module in sys.modules with a lazy proxy; attribute access
    # on the package triggers the real submodule import on first use.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
65,923
src/transformers/models/blenderbot_small/modeling_flax_blenderbot_small.py
# coding=utf-8 # Copyright 2021 The Facebook, Inc. and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Flax BlenderbotSmall model.""" import math import random from functools import partial from typing import Callable, Optional, Tuple import flax.linen as nn import jax import jax.numpy as jnp import numpy as np from flax.core.frozen_dict import FrozenDict, freeze, unfreeze from flax.linen import combine_masks, make_causal_mask from flax.linen.attention import dot_product_attention_weights from flax.traverse_util import flatten_dict, unflatten_dict from jax import lax from jax.random import PRNGKey from ...modeling_flax_outputs import ( FlaxBaseModelOutput, FlaxBaseModelOutputWithPastAndCrossAttentions, FlaxCausalLMOutputWithCrossAttentions, FlaxSeq2SeqLMOutput, FlaxSeq2SeqModelOutput, ) from ...modeling_flax_utils import ( ACT2FN, FlaxPreTrainedModel, append_call_sample_docstring, append_replace_return_docstrings, overwrite_call_docstring, ) from ...utils import add_start_docstrings, logging, replace_return_docstrings from .configuration_blenderbot_small import BlenderbotSmallConfig logger = logging.get_logger(__name__) _CHECKPOINT_FOR_DOC = "facebook/blenderbot_small-90M" _CONFIG_FOR_DOC = "BlenderbotSmallConfig" BLENDERBOT_SMALL_START_DOCSTRING = r""" This model inherits from [`FlaxPreTrainedModel`]. 
Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.) This model is also a Flax Linen [flax.nn.Module](https://flax.readthedocs.io/en/latest/_autosummary/flax.nn.module.html) subclass. Use it as a regular Flax Module and refer to the Flax documentation for all matter related to general usage and behavior. Finally, this model supports inherent JAX features such as: - [Just-In-Time (JIT) compilation](https://jax.readthedocs.io/en/latest/jax.html#just-in-time-compilation-jit) - [Automatic Differentiation](https://jax.readthedocs.io/en/latest/jax.html#automatic-differentiation) - [Vectorization](https://jax.readthedocs.io/en/latest/jax.html#vectorization-vmap) - [Parallelization](https://jax.readthedocs.io/en/latest/jax.html#parallelization-pmap) Parameters: config ([`BlenderbotSmallConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~FlaxPreTrainedModel.from_pretrained`] method to load the model weights. dtype (`jax.numpy.dtype`, *optional*, defaults to `jax.numpy.float32`): The data type of the computation. Can be one of `jax.numpy.float32`, `jax.numpy.float16` (on GPUs) and `jax.numpy.bfloat16` (on TPUs). This can be used to enable mixed-precision training or half-precision inference on GPUs or TPUs. If specified all the computation will be performed with the given `dtype`. **Note that this only specifies the dtype of the computation and does not influence the dtype of model parameters.** If you wish to change the dtype of the model parameters, see [`~FlaxPreTrainedModel.to_fp16`] and [`~FlaxPreTrainedModel.to_bf16`]. """ BLENDERBOT_SMALL_INPUTS_DOCSTRING = r""" Args: input_ids (`jnp.ndarray` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. 
Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`jnp.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) decoder_input_ids (`jnp.ndarray` of shape `(batch_size, target_sequence_length)`, *optional*): Indices of decoder input sequence tokens in the vocabulary. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are decoder input IDs?](../glossary#decoder-input-ids) For translation and summarization training, `decoder_input_ids` should be provided. If no `decoder_input_ids` is provided, the model will create this tensor by shifting the `input_ids` to the right for denoising pre-training following the paper. decoder_attention_mask (`jnp.ndarray` of shape `(batch_size, target_sequence_length)`, *optional*): Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also be used by default. If you want to change padding behavior, you should modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more information on the default strategy. position_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`. decoder_position_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each decoder input sequence tokens in the position embeddings. 
Selected in the range `[0, config.max_position_embeddings - 1]`. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ BLENDERBOT_SMALL_ENCODE_INPUTS_DOCSTRING = r""" Args: input_ids (`jnp.ndarray` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`jnp.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) position_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. 
""" BLENDERBOT_SMALL_DECODE_INPUTS_DOCSTRING = r""" Args: decoder_input_ids (`jnp.ndarray` of shape `(batch_size, target_sequence_length)`): Indices of decoder input sequence tokens in the vocabulary. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are decoder input IDs?](../glossary#decoder-input-ids) For translation and summarization training, `decoder_input_ids` should be provided. If no `decoder_input_ids` is provided, the model will create this tensor by shifting the `input_ids` to the right for denoising pre-training following the paper. encoder_outputs (`tuple(tuple(jnp.ndarray)`): Tuple consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*: `attentions`) `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)`, *optional*) is a sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. encoder_attention_mask (`jnp.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) decoder_attention_mask (`jnp.ndarray` of shape `(batch_size, target_sequence_length)`, *optional*): Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also be used by default. If you want to change padding behavior, you should modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more information on the default strategy. decoder_position_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each decoder input sequence tokens in the position embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`. 
past_key_values (`Dict[str, np.ndarray]`, *optional*, returned by `init_cache` or when passing previous `past_key_values`): Dictionary of pre-computed hidden-states (key and values in the attention blocks) that can be used for fast auto-regressive decoding. Pre-computed key and value hidden-states are of shape *[batch_size, max_length]*. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ # Copied from transformers.models.bart.modeling_flax_bart.shift_tokens_right def shift_tokens_right(input_ids: jnp.ndarray, pad_token_id: int, decoder_start_token_id: int) -> jnp.ndarray: """ Shift input ids one token to the right. """ shifted_input_ids = np.zeros_like(input_ids) shifted_input_ids[:, 1:] = input_ids[:, :-1] shifted_input_ids[:, 0] = decoder_start_token_id shifted_input_ids = np.where(shifted_input_ids == -100, pad_token_id, shifted_input_ids) return shifted_input_ids # Copied from transformers.models.bart.modeling_flax_bart.FlaxBartAttention with Bart->BlenderbotSmall class FlaxBlenderbotSmallAttention(nn.Module): config: BlenderbotSmallConfig embed_dim: int num_heads: int dropout: float = 0.0 causal: bool = False bias: bool = True dtype: jnp.dtype = jnp.float32 # the dtype of the computation def setup(self) -> None: self.head_dim = self.embed_dim // self.num_heads if self.head_dim * self.num_heads != self.embed_dim: raise ValueError( f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}" f" and `num_heads`: {self.num_heads})." 
) dense = partial( nn.Dense, self.embed_dim, use_bias=self.bias, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(self.config.init_std), ) self.q_proj, self.k_proj, self.v_proj = dense(), dense(), dense() self.out_proj = dense() self.dropout_layer = nn.Dropout(rate=self.dropout) if self.causal: self.causal_mask = make_causal_mask( jnp.ones((1, self.config.max_position_embeddings), dtype="bool"), dtype="bool" ) def _split_heads(self, hidden_states): return hidden_states.reshape(hidden_states.shape[:2] + (self.num_heads, self.head_dim)) def _merge_heads(self, hidden_states): return hidden_states.reshape(hidden_states.shape[:2] + (self.embed_dim,)) @nn.compact def _concatenate_to_cache(self, key, value, query, attention_mask): """ This function takes projected key, value states from a single input token and concatenates the states to cached states from previous steps. This function is slighly adapted from the official Flax repository: https://github.com/google/flax/blob/491ce18759622506588784b4fca0e4bf05f8c8cd/flax/linen/attention.py#L252 """ # detect if we're initializing by absence of existing cache data. 
is_initialized = self.has_variable("cache", "cached_key") cached_key = self.variable("cache", "cached_key", jnp.zeros, key.shape, key.dtype) cached_value = self.variable("cache", "cached_value", jnp.zeros, value.shape, value.dtype) cache_index = self.variable("cache", "cache_index", lambda: jnp.array(0, dtype=jnp.int32)) if is_initialized: *batch_dims, max_length, num_heads, depth_per_head = cached_key.value.shape # update key, value caches with our new 1d spatial slices cur_index = cache_index.value indices = (0,) * len(batch_dims) + (cur_index, 0, 0) key = lax.dynamic_update_slice(cached_key.value, key, indices) value = lax.dynamic_update_slice(cached_value.value, value, indices) cached_key.value = key cached_value.value = value num_updated_cache_vectors = query.shape[1] cache_index.value = cache_index.value + num_updated_cache_vectors # causal mask for cached decoder self-attention: our single query position should only attend to those key positions that have already been generated and cached, not the remaining zero elements. 
pad_mask = jnp.broadcast_to( jnp.arange(max_length) < cur_index + num_updated_cache_vectors, tuple(batch_dims) + (1, num_updated_cache_vectors, max_length), ) attention_mask = combine_masks(pad_mask, attention_mask) return key, value, attention_mask def __call__( self, hidden_states: jnp.ndarray, key_value_states: Optional[jnp.ndarray] = None, attention_mask: Optional[jnp.ndarray] = None, init_cache: bool = False, deterministic: bool = True, ) -> Tuple[jnp.ndarray]: """Input shape: Batch x Time x Channel""" # if key_value_states are provided this layer is used as a cross-attention layer # for the decoder is_cross_attention = key_value_states is not None batch_size = hidden_states.shape[0] # get query proj query_states = self.q_proj(hidden_states) # get key, value proj if is_cross_attention: # cross_attentions key_states = self.k_proj(key_value_states) value_states = self.v_proj(key_value_states) else: # self_attention key_states = self.k_proj(hidden_states) value_states = self.v_proj(hidden_states) query_states = self._split_heads(query_states) key_states = self._split_heads(key_states) value_states = self._split_heads(value_states) # handle cache prepare causal attention mask if self.causal: query_length, key_length = query_states.shape[1], key_states.shape[1] if self.has_variable("cache", "cached_key"): mask_shift = self.variables["cache"]["cache_index"] max_decoder_length = self.variables["cache"]["cached_key"].shape[1] causal_mask = lax.dynamic_slice( self.causal_mask, (0, 0, mask_shift, 0), (1, 1, query_length, max_decoder_length) ) else: causal_mask = self.causal_mask[:, :, :query_length, :key_length] causal_mask = jnp.broadcast_to(causal_mask, (batch_size,) + causal_mask.shape[1:]) # combine masks if needed if attention_mask is not None and self.causal: attention_mask = jnp.broadcast_to(jnp.expand_dims(attention_mask, axis=(-3, -2)), causal_mask.shape) attention_mask = combine_masks(attention_mask, causal_mask) elif self.causal: attention_mask = causal_mask 
elif attention_mask is not None: attention_mask = jnp.expand_dims(attention_mask, axis=(-3, -2)) # During fast autoregressive decoding, we feed one position at a time, # and cache the keys and values step by step. if self.causal and (self.has_variable("cache", "cached_key") or init_cache): key_states, value_states, attention_mask = self._concatenate_to_cache( key_states, value_states, query_states, attention_mask ) # Convert the boolean attention mask to an attention bias. if attention_mask is not None: # attention mask in the form of attention bias attention_bias = lax.select( attention_mask > 0, jnp.full(attention_mask.shape, 0.0).astype(self.dtype), jnp.full(attention_mask.shape, jnp.finfo(self.dtype).min).astype(self.dtype), ) else: attention_bias = None dropout_rng = None if not deterministic and self.dropout > 0.0: dropout_rng = self.make_rng("dropout") attn_weights = dot_product_attention_weights( query_states, key_states, bias=attention_bias, dropout_rng=dropout_rng, dropout_rate=self.dropout, broadcast_dropout=True, deterministic=deterministic, dtype=self.dtype, precision=None, ) attn_output = jnp.einsum("...hqk,...khd->...qhd", attn_weights, value_states) attn_output = self._merge_heads(attn_output) attn_output = self.out_proj(attn_output) return attn_output, attn_weights # Copied from transformers.models.bart.modeling_flax_bart.FlaxBartEncoderLayer with Bart->BlenderbotSmall class FlaxBlenderbotSmallEncoderLayer(nn.Module): config: BlenderbotSmallConfig dtype: jnp.dtype = jnp.float32 def setup(self) -> None: self.embed_dim = self.config.d_model self.self_attn = FlaxBlenderbotSmallAttention( config=self.config, embed_dim=self.embed_dim, num_heads=self.config.encoder_attention_heads, dropout=self.config.attention_dropout, dtype=self.dtype, ) self.self_attn_layer_norm = nn.LayerNorm(dtype=self.dtype, epsilon=1e-05) self.dropout_layer = nn.Dropout(rate=self.config.dropout) self.activation_fn = ACT2FN[self.config.activation_function] 
self.activation_dropout_layer = nn.Dropout(rate=self.config.activation_dropout) self.fc1 = nn.Dense( self.config.encoder_ffn_dim, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(self.config.init_std), ) self.fc2 = nn.Dense( self.embed_dim, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(self.config.init_std) ) self.final_layer_norm = nn.LayerNorm(dtype=self.dtype, epsilon=1e-05) def __call__( self, hidden_states: jnp.ndarray, attention_mask: jnp.ndarray, output_attentions: bool = True, deterministic: bool = True, ) -> Tuple[jnp.ndarray]: residual = hidden_states hidden_states, attn_weights = self.self_attn(hidden_states=hidden_states, attention_mask=attention_mask) hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic) hidden_states = residual + hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) residual = hidden_states hidden_states = self.activation_fn(self.fc1(hidden_states)) hidden_states = self.activation_dropout_layer(hidden_states, deterministic=deterministic) hidden_states = self.fc2(hidden_states) hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic) hidden_states = residual + hidden_states hidden_states = self.final_layer_norm(hidden_states) outputs = (hidden_states,) if output_attentions: outputs += (attn_weights,) return outputs # Copied from transformers.models.bart.modeling_flax_bart.FlaxBartEncoderLayerCollection with Bart->BlenderbotSmall class FlaxBlenderbotSmallEncoderLayerCollection(nn.Module): config: BlenderbotSmallConfig dtype: jnp.dtype = jnp.float32 # the dtype of the computation def setup(self): self.layers = [ FlaxBlenderbotSmallEncoderLayer(self.config, name=str(i), dtype=self.dtype) for i in range(self.config.encoder_layers) ] self.layerdrop = self.config.encoder_layerdrop def __call__( self, hidden_states, attention_mask, deterministic: bool = True, output_attentions: bool = False, output_hidden_states: bool = False, return_dict: bool = True, ): 
all_attentions = () if output_attentions else None all_hidden_states = () if output_hidden_states else None for encoder_layer in self.layers: if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability = random.uniform(0, 1) if not deterministic and (dropout_probability < self.layerdrop): # skip the layer layer_outputs = (None, None) else: layer_outputs = encoder_layer( hidden_states, attention_mask, output_attentions, deterministic, ) hidden_states = layer_outputs[0] if output_attentions: all_attentions = all_attentions + (layer_outputs[1],) if output_hidden_states: all_hidden_states += (hidden_states,) outputs = (hidden_states, all_hidden_states, all_attentions) if not return_dict: return tuple(v for v in outputs if v is not None) return FlaxBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_attentions ) # Copied from transformers.models.bart.modeling_flax_bart.FlaxBartDecoderLayer with Bart->BlenderbotSmall class FlaxBlenderbotSmallDecoderLayer(nn.Module): config: BlenderbotSmallConfig dtype: jnp.dtype = jnp.float32 def setup(self) -> None: self.embed_dim = self.config.d_model self.self_attn = FlaxBlenderbotSmallAttention( config=self.config, embed_dim=self.embed_dim, num_heads=self.config.decoder_attention_heads, dropout=self.config.attention_dropout, causal=True, dtype=self.dtype, ) self.dropout_layer = nn.Dropout(rate=self.config.dropout) self.activation_fn = ACT2FN[self.config.activation_function] self.activation_dropout_layer = nn.Dropout(rate=self.config.activation_dropout) self.self_attn_layer_norm = nn.LayerNorm(dtype=self.dtype, epsilon=1e-05) self.encoder_attn = FlaxBlenderbotSmallAttention( config=self.config, embed_dim=self.embed_dim, num_heads=self.config.decoder_attention_heads, dropout=self.config.attention_dropout, dtype=self.dtype, ) self.encoder_attn_layer_norm = 
nn.LayerNorm(dtype=self.dtype, epsilon=1e-05) self.fc1 = nn.Dense( self.config.decoder_ffn_dim, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(self.config.init_std), ) self.fc2 = nn.Dense( self.embed_dim, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(self.config.init_std) ) self.final_layer_norm = nn.LayerNorm(dtype=self.dtype, epsilon=1e-05) def __call__( self, hidden_states: jnp.ndarray, attention_mask: jnp.ndarray, encoder_hidden_states: Optional[jnp.ndarray] = None, encoder_attention_mask: Optional[jnp.ndarray] = None, init_cache: bool = False, output_attentions: bool = True, deterministic: bool = True, ) -> Tuple[jnp.ndarray]: residual = hidden_states # Self Attention hidden_states, self_attn_weights = self.self_attn( hidden_states=hidden_states, attention_mask=attention_mask, init_cache=init_cache ) hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic) hidden_states = residual + hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) # Cross-Attention Block cross_attn_weights = None if encoder_hidden_states is not None: residual = hidden_states hidden_states, cross_attn_weights = self.encoder_attn( hidden_states=hidden_states, key_value_states=encoder_hidden_states, attention_mask=encoder_attention_mask, ) hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic) hidden_states = residual + hidden_states hidden_states = self.encoder_attn_layer_norm(hidden_states) # Fully Connected residual = hidden_states hidden_states = self.activation_fn(self.fc1(hidden_states)) hidden_states = self.activation_dropout_layer(hidden_states, deterministic=deterministic) hidden_states = self.fc2(hidden_states) hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic) hidden_states = residual + hidden_states hidden_states = self.final_layer_norm(hidden_states) outputs = (hidden_states,) if output_attentions: outputs += (self_attn_weights, cross_attn_weights) return outputs # 
Copied from transformers.models.bart.modeling_flax_bart.FlaxBartDecoderLayerCollection with Bart->BlenderbotSmall class FlaxBlenderbotSmallDecoderLayerCollection(nn.Module): config: BlenderbotSmallConfig dtype: jnp.dtype = jnp.float32 # the dtype of the computation def setup(self): self.layers = [ FlaxBlenderbotSmallDecoderLayer(self.config, name=str(i), dtype=self.dtype) for i in range(self.config.decoder_layers) ] self.layerdrop = self.config.decoder_layerdrop def __call__( self, hidden_states, attention_mask, encoder_hidden_states: Optional[jnp.ndarray] = None, encoder_attention_mask: Optional[jnp.ndarray] = None, deterministic: bool = True, init_cache: bool = False, output_attentions: bool = False, output_hidden_states: bool = False, return_dict: bool = True, ): # decoder layers all_hidden_states = () if output_hidden_states else None all_self_attns = () if output_attentions else None all_cross_attentions = () if (output_attentions and encoder_hidden_states is not None) else None for decoder_layer in self.layers: if output_hidden_states: all_hidden_states += (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability = random.uniform(0, 1) if not deterministic and (dropout_probability < self.layerdrop): layer_outputs = (None, None, None) else: layer_outputs = decoder_layer( hidden_states, attention_mask=attention_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_attention_mask, init_cache=init_cache, output_attentions=output_attentions, deterministic=deterministic, ) hidden_states = layer_outputs[0] if output_attentions: all_self_attns += (layer_outputs[1],) if encoder_hidden_states is not None: all_cross_attentions += (layer_outputs[2],) # add hidden states from the last decoder layer if output_hidden_states: all_hidden_states += (hidden_states,) outputs = [hidden_states, all_hidden_states, all_self_attns, all_cross_attentions] if not return_dict: return tuple(v for v in outputs if 
v is not None) return FlaxBaseModelOutputWithPastAndCrossAttentions( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attns, cross_attentions=all_cross_attentions, ) class FlaxBlenderbotSmallEncoder(nn.Module): config: BlenderbotSmallConfig embed_tokens: nn.Embed dtype: jnp.dtype = jnp.float32 # the dtype of the computation def setup(self): self.dropout_layer = nn.Dropout(rate=self.config.dropout) embed_dim = self.config.d_model self.padding_idx = self.config.pad_token_id self.max_source_positions = self.config.max_position_embeddings self.embed_scale = math.sqrt(embed_dim) if self.config.scale_embedding else 1.0 self.embed_positions = nn.Embed( self.config.max_position_embeddings, embed_dim, embedding_init=jax.nn.initializers.normal(self.config.init_std), ) self.layers = FlaxBlenderbotSmallEncoderLayerCollection(self.config, self.dtype) self.layernorm_embedding = nn.LayerNorm(dtype=self.dtype, epsilon=1e-05) def __call__( self, input_ids, attention_mask, position_ids, output_attentions: bool = False, output_hidden_states: bool = False, return_dict: bool = True, deterministic: bool = True, ): input_shape = input_ids.shape input_ids = input_ids.reshape(-1, input_shape[-1]) inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale embed_pos = self.embed_positions(position_ids) hidden_states = inputs_embeds + embed_pos hidden_states = self.layernorm_embedding(hidden_states) hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic) outputs = self.layers( hidden_states, attention_mask, deterministic=deterministic, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) if not return_dict: return outputs return FlaxBaseModelOutput( last_hidden_state=outputs.last_hidden_state, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) class FlaxBlenderbotSmallDecoder(nn.Module): config: BlenderbotSmallConfig embed_tokens: nn.Embed dtype: jnp.dtype 
= jnp.float32 # the dtype of the computation def setup(self): self.dropout_layer = nn.Dropout(rate=self.config.dropout) embed_dim = self.config.d_model self.padding_idx = self.config.pad_token_id self.max_target_positions = self.config.max_position_embeddings self.embed_scale = math.sqrt(self.config.d_model) if self.config.scale_embedding else 1.0 self.embed_positions = nn.Embed( self.config.max_position_embeddings, embed_dim, embedding_init=jax.nn.initializers.normal(self.config.init_std), ) self.layers = FlaxBlenderbotSmallDecoderLayerCollection(self.config, self.dtype) self.layernorm_embedding = nn.LayerNorm(dtype=self.dtype, epsilon=1e-05) def __call__( self, input_ids, attention_mask, position_ids, encoder_hidden_states: Optional[jnp.ndarray] = None, encoder_attention_mask: Optional[jnp.ndarray] = None, init_cache: bool = False, output_attentions: bool = False, output_hidden_states: bool = False, return_dict: bool = True, deterministic: bool = True, ): input_shape = input_ids.shape input_ids = input_ids.reshape(-1, input_shape[-1]) inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale # embed positions positions = self.embed_positions(position_ids) # BlenderbotSmall applies layer norm on inputs_embeds in decoder inputs_embeds = self.layernorm_embedding(inputs_embeds) hidden_states = inputs_embeds + positions hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic) outputs = self.layers( hidden_states, attention_mask, encoder_hidden_states, encoder_attention_mask, deterministic=deterministic, init_cache=init_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) if not return_dict: return outputs return FlaxBaseModelOutputWithPastAndCrossAttentions( last_hidden_state=outputs.last_hidden_state, hidden_states=outputs.hidden_states, attentions=outputs.attentions, cross_attentions=outputs.cross_attentions, ) # Copied from 
transformers.models.bart.modeling_flax_bart.FlaxBartModule with Bart->BlenderbotSmall class FlaxBlenderbotSmallModule(nn.Module): config: BlenderbotSmallConfig dtype: jnp.dtype = jnp.float32 # the dtype of the computation def setup(self): self.shared = nn.Embed( self.config.vocab_size, self.config.d_model, embedding_init=jax.nn.initializers.normal(self.config.init_std), dtype=self.dtype, ) self.encoder = FlaxBlenderbotSmallEncoder(self.config, dtype=self.dtype, embed_tokens=self.shared) self.decoder = FlaxBlenderbotSmallDecoder(self.config, dtype=self.dtype, embed_tokens=self.shared) def _get_encoder_module(self): return self.encoder def _get_decoder_module(self): return self.decoder def __call__( self, input_ids, attention_mask, decoder_input_ids, decoder_attention_mask, position_ids, decoder_position_ids, output_attentions: bool = False, output_hidden_states: bool = False, return_dict: bool = True, deterministic: bool = True, ): encoder_outputs = self.encoder( input_ids=input_ids, attention_mask=attention_mask, position_ids=position_ids, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, deterministic=deterministic, ) decoder_outputs = self.decoder( input_ids=decoder_input_ids, attention_mask=decoder_attention_mask, position_ids=decoder_position_ids, encoder_hidden_states=encoder_outputs[0], encoder_attention_mask=attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, deterministic=deterministic, ) if not return_dict: return decoder_outputs + encoder_outputs return FlaxSeq2SeqModelOutput( last_hidden_state=decoder_outputs.last_hidden_state, decoder_hidden_states=decoder_outputs.hidden_states, decoder_attentions=decoder_outputs.attentions, cross_attentions=decoder_outputs.cross_attentions, encoder_last_hidden_state=encoder_outputs.last_hidden_state, encoder_hidden_states=encoder_outputs.hidden_states, 
encoder_attentions=encoder_outputs.attentions, ) class FlaxBlenderbotSmallPreTrainedModel(FlaxPreTrainedModel): config_class = BlenderbotSmallConfig base_model_prefix: str = "model" module_class: nn.Module = None def __init__( self, config: BlenderbotSmallConfig, input_shape: Tuple[int] = (1, 1), seed: int = 0, dtype: jnp.dtype = jnp.float32, _do_init: bool = True, **kwargs, ): module = self.module_class(config=config, dtype=dtype, **kwargs) super().__init__(config, module, input_shape=input_shape, seed=seed, dtype=dtype, _do_init=_do_init) def init_weights(self, rng: jax.random.PRNGKey, input_shape: Tuple, params: FrozenDict = None) -> FrozenDict: # init input tensors input_ids = jnp.zeros(input_shape, dtype="i4") # make sure initialization pass will work for FlaxBlenderbotSmallForSequenceClassificationModule input_ids = input_ids.at[(..., -1)].set(self.config.eos_token_id) attention_mask = jnp.ones_like(input_ids) decoder_input_ids = input_ids decoder_attention_mask = jnp.ones_like(input_ids) batch_size, sequence_length = input_ids.shape position_ids = jnp.broadcast_to(jnp.arange(sequence_length)[None, :], (batch_size, sequence_length)) decoder_position_ids = jnp.broadcast_to(jnp.arange(sequence_length)[None, :], (batch_size, sequence_length)) params_rng, dropout_rng = jax.random.split(rng) rngs = {"params": params_rng, "dropout": dropout_rng} random_params = self.module.init( rngs, input_ids, attention_mask, decoder_input_ids, decoder_attention_mask, position_ids, decoder_position_ids, )["params"] if params is not None: random_params = flatten_dict(unfreeze(random_params)) params = flatten_dict(unfreeze(params)) for missing_key in self._missing_keys: params[missing_key] = random_params[missing_key] self._missing_keys = set() return freeze(unflatten_dict(params)) else: return random_params def init_cache(self, batch_size, max_length, encoder_outputs): r""" Args: batch_size (`int`): batch_size used for fast auto-regressive decoding. 
Defines the batch size of the initialized cache. max_length (`int`): maximum possible length for auto-regressive decoding. Defines the sequence length of the initialized cache. encoder_outputs (`Union[FlaxBaseModelOutput, tuple(tuple(jnp.ndarray)]`): `encoder_outputs` consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*: `attentions`). `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)`, *optional*) is a sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. """ # init input variables to retrieve cache decoder_input_ids = jnp.ones((batch_size, max_length), dtype="i4") decoder_attention_mask = jnp.ones_like(decoder_input_ids) decoder_position_ids = jnp.broadcast_to( jnp.arange(jnp.atleast_2d(decoder_input_ids).shape[-1]), decoder_input_ids.shape ) def _decoder_forward(module, decoder_input_ids, decoder_attention_mask, decoder_position_ids, **kwargs): decoder_module = module._get_decoder_module() return decoder_module( decoder_input_ids, decoder_attention_mask, decoder_position_ids, **kwargs, ) init_variables = self.module.init( jax.random.PRNGKey(0), decoder_input_ids=decoder_input_ids, decoder_attention_mask=decoder_attention_mask, decoder_position_ids=decoder_position_ids, encoder_hidden_states=encoder_outputs[0], init_cache=True, method=_decoder_forward, # we only need to call the decoder to init the cache ) return unfreeze(init_variables["cache"]) @add_start_docstrings(BLENDERBOT_SMALL_ENCODE_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=FlaxBaseModelOutput, config_class=BlenderbotSmallConfig) def encode( self, input_ids: jnp.ndarray, attention_mask: Optional[jnp.ndarray] = None, position_ids: Optional[jnp.ndarray] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, train: bool = False, params: dict = None, dropout_rng: PRNGKey = None, ): r""" Returns: Example: 
```python >>> from transformers import AutoTokenizer, FlaxBlenderbotSmallForConditionalGeneration >>> model = FlaxBlenderbotSmallForConditionalGeneration.from_pretrained("facebook/blenderbot_small-90M") >>> tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot_small-90M") >>> text = "My friends are cool but they eat too many carbs." >>> inputs = tokenizer(text, max_length=1024, return_tensors="np") >>> encoder_outputs = model.encode(**inputs) ```""" output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.return_dict if attention_mask is None: attention_mask = jnp.ones_like(input_ids) if position_ids is None: batch_size, sequence_length = input_ids.shape position_ids = jnp.broadcast_to(jnp.arange(sequence_length)[None, :], (batch_size, sequence_length)) # Handle any PRNG if needed rngs = {} if dropout_rng is not None: rngs["dropout"] = dropout_rng def _encoder_forward(module, input_ids, attention_mask, position_ids, **kwargs): encode_module = module._get_encoder_module() return encode_module(input_ids, attention_mask, position_ids, **kwargs) return self.module.apply( {"params": params or self.params}, input_ids=jnp.array(input_ids, dtype="i4"), attention_mask=jnp.array(attention_mask, dtype="i4"), position_ids=jnp.array(position_ids, dtype="i4"), output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, deterministic=not train, rngs=rngs, method=_encoder_forward, ) @add_start_docstrings(BLENDERBOT_SMALL_DECODE_INPUTS_DOCSTRING) @replace_return_docstrings( output_type=FlaxBaseModelOutputWithPastAndCrossAttentions, config_class=BlenderbotSmallConfig ) def decode( self, decoder_input_ids, encoder_outputs, encoder_attention_mask: Optional[jnp.ndarray] = None, 
decoder_attention_mask: Optional[jnp.ndarray] = None, decoder_position_ids: Optional[jnp.ndarray] = None, past_key_values: dict = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, train: bool = False, params: dict = None, dropout_rng: PRNGKey = None, ): r""" Returns: Example: ```python >>> import jax.numpy as jnp >>> from transformers import AutoTokenizer, FlaxBlenderbotSmallForConditionalGeneration >>> model = FlaxBlenderbotSmallForConditionalGeneration.from_pretrained("facebook/blenderbot_small-90M") >>> tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot_small-90M") >>> text = "My friends are cool but they eat too many carbs." >>> inputs = tokenizer(text, max_length=1024, return_tensors="np") >>> encoder_outputs = model.encode(**inputs) >>> decoder_start_token_id = model.config.decoder_start_token_id >>> decoder_input_ids = jnp.ones((inputs.input_ids.shape[0], 1), dtype="i4") * decoder_start_token_id >>> outputs = model.decode(decoder_input_ids, encoder_outputs) >>> last_decoder_hidden_states = outputs.last_hidden_state ```""" output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.return_dict encoder_hidden_states = encoder_outputs[0] if encoder_attention_mask is None: batch_size, sequence_length = encoder_hidden_states.shape[:2] encoder_attention_mask = jnp.ones((batch_size, sequence_length)) batch_size, sequence_length = decoder_input_ids.shape if decoder_attention_mask is None: decoder_attention_mask = jnp.ones((batch_size, sequence_length)) if decoder_position_ids is None: if past_key_values is not None: raise ValueError("Make sure to provide `decoder_position_ids` when passing `past_key_values`.") decoder_position_ids = 
jnp.broadcast_to( jnp.arange(sequence_length)[None, :], (batch_size, sequence_length) ) # Handle any PRNG if needed rngs = {} if dropout_rng is not None: rngs["dropout"] = dropout_rng inputs = {"params": params or self.params} # if past_key_values are passed then cache is already initialized a private flag init_cache has to be # passed down to ensure cache is used. It has to be made sure that cache is marked as mutable so that # it can be changed by FlaxBlenderbotSmallAttention module if past_key_values: inputs["cache"] = past_key_values mutable = ["cache"] else: mutable = False def _decoder_forward(module, decoder_input_ids, decoder_attention_mask, decoder_position_ids, **kwargs): decoder_module = module._get_decoder_module() return decoder_module( decoder_input_ids, decoder_attention_mask, decoder_position_ids, **kwargs, ) outputs = self.module.apply( inputs, decoder_input_ids=jnp.array(decoder_input_ids, dtype="i4"), decoder_attention_mask=jnp.array(decoder_attention_mask, dtype="i4"), decoder_position_ids=jnp.array(decoder_position_ids, dtype="i4"), encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=jnp.array(encoder_attention_mask, dtype="i4"), output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, deterministic=not train, rngs=rngs, mutable=mutable, method=_decoder_forward, ) # add updated cache to model output if past_key_values is not None and return_dict: outputs, past = outputs outputs["past_key_values"] = unfreeze(past["cache"]) return outputs elif past_key_values is not None and not return_dict: outputs, past = outputs outputs = outputs[:1] + (unfreeze(past["cache"]),) + outputs[1:] return outputs def __call__( self, input_ids: jnp.ndarray, attention_mask: Optional[jnp.ndarray] = None, decoder_input_ids: Optional[jnp.ndarray] = None, decoder_attention_mask: Optional[jnp.ndarray] = None, position_ids: Optional[jnp.ndarray] = None, decoder_position_ids: Optional[jnp.ndarray] = None, 
output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, train: bool = False, params: dict = None, dropout_rng: PRNGKey = None, ): output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.return_dict # prepare encoder inputs if attention_mask is None: attention_mask = jnp.ones_like(input_ids) if position_ids is None: batch_size, sequence_length = input_ids.shape position_ids = jnp.broadcast_to(jnp.arange(sequence_length)[None, :], (batch_size, sequence_length)) # prepare decoder inputs if decoder_input_ids is None: decoder_input_ids = shift_tokens_right( input_ids, self.config.pad_token_id, decoder_start_token_id=self.config.decoder_start_token_id ) if decoder_attention_mask is None: decoder_attention_mask = jnp.ones_like(decoder_input_ids) if decoder_position_ids is None: batch_size, sequence_length = decoder_input_ids.shape decoder_position_ids = jnp.broadcast_to( jnp.arange(sequence_length)[None, :], (batch_size, sequence_length) ) # Handle any PRNG if needed rngs = {"dropout": dropout_rng} if dropout_rng is not None else {} return self.module.apply( {"params": params or self.params}, input_ids=jnp.array(input_ids, dtype="i4"), attention_mask=jnp.array(attention_mask, dtype="i4"), position_ids=jnp.array(position_ids, dtype="i4"), decoder_input_ids=jnp.array(decoder_input_ids, dtype="i4"), decoder_attention_mask=jnp.array(decoder_attention_mask, dtype="i4"), decoder_position_ids=jnp.array(decoder_position_ids, dtype="i4"), output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, deterministic=not train, rngs=rngs, ) @add_start_docstrings( "The bare BlenderbotSmall Model transformer outputting raw hidden-states 
without any specific head on top.", BLENDERBOT_SMALL_START_DOCSTRING, ) class FlaxBlenderbotSmallModel(FlaxBlenderbotSmallPreTrainedModel): config: BlenderbotSmallConfig dtype: jnp.dtype = jnp.float32 # the dtype of the computation module_class = FlaxBlenderbotSmallModule append_call_sample_docstring(FlaxBlenderbotSmallModel, _CHECKPOINT_FOR_DOC, FlaxSeq2SeqModelOutput, _CONFIG_FOR_DOC) # Copied from transformers.models.bart.modeling_flax_bart.FlaxBartForConditionalGenerationModule with Bart->BlenderbotSmall class FlaxBlenderbotSmallForConditionalGenerationModule(nn.Module): config: BlenderbotSmallConfig dtype: jnp.dtype = jnp.float32 bias_init: Callable[..., jnp.ndarray] = jax.nn.initializers.zeros def setup(self): self.model = FlaxBlenderbotSmallModule(config=self.config, dtype=self.dtype) self.lm_head = nn.Dense( self.model.shared.num_embeddings, use_bias=False, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(self.config.init_std), ) self.final_logits_bias = self.param("final_logits_bias", self.bias_init, (1, self.model.shared.num_embeddings)) def _get_encoder_module(self): return self.model.encoder def _get_decoder_module(self): return self.model.decoder def __call__( self, input_ids, attention_mask, decoder_input_ids, decoder_attention_mask, position_ids, decoder_position_ids, output_attentions: bool = False, output_hidden_states: bool = False, return_dict: bool = True, deterministic: bool = True, ): outputs = self.model( input_ids=input_ids, attention_mask=attention_mask, decoder_input_ids=decoder_input_ids, decoder_attention_mask=decoder_attention_mask, position_ids=position_ids, decoder_position_ids=decoder_position_ids, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, deterministic=deterministic, ) hidden_states = outputs[0] if self.config.tie_word_embeddings: shared_embedding = self.model.variables["params"]["shared"]["embedding"] lm_logits = self.lm_head.apply({"params": {"kernel": 
shared_embedding.T}}, hidden_states) else: lm_logits = self.lm_head(hidden_states) lm_logits += jax.lax.stop_gradient(self.final_logits_bias.astype(self.dtype)) if not return_dict: output = (lm_logits,) + outputs[1:] return output return FlaxSeq2SeqLMOutput( logits=lm_logits, decoder_hidden_states=outputs.decoder_hidden_states, decoder_attentions=outputs.decoder_attentions, cross_attentions=outputs.cross_attentions, encoder_last_hidden_state=outputs.encoder_last_hidden_state, encoder_hidden_states=outputs.encoder_hidden_states, encoder_attentions=outputs.encoder_attentions, ) @add_start_docstrings( "The BLENDERBOT_SMALL Model with a language modeling head. Can be used for summarization.", BLENDERBOT_SMALL_START_DOCSTRING, ) class FlaxBlenderbotSmallForConditionalGeneration(FlaxBlenderbotSmallPreTrainedModel): module_class = FlaxBlenderbotSmallForConditionalGenerationModule dtype: jnp.dtype = jnp.float32 @add_start_docstrings(BLENDERBOT_SMALL_DECODE_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=FlaxCausalLMOutputWithCrossAttentions, config_class=BlenderbotSmallConfig) def decode( self, decoder_input_ids, encoder_outputs, encoder_attention_mask: Optional[jnp.ndarray] = None, decoder_attention_mask: Optional[jnp.ndarray] = None, decoder_position_ids: Optional[jnp.ndarray] = None, past_key_values: dict = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, deterministic: bool = True, params: dict = None, dropout_rng: PRNGKey = None, ): r""" Returns: Example: ```python >>> import jax.numpy as jnp >>> from transformers import AutoTokenizer, FlaxBlenderbotSmallForConditionalGeneration >>> model = FlaxBlenderbotSmallForConditionalGeneration.from_pretrained("facebook/blenderbot_small-90M") >>> tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot_small-90M") >>> text = "My friends are cool but they eat too many carbs." 
>>> inputs = tokenizer(text, max_length=1024, return_tensors="np") >>> encoder_outputs = model.encode(**inputs) >>> decoder_start_token_id = model.config.decoder_start_token_id >>> decoder_input_ids = jnp.ones((inputs.input_ids.shape[0], 1), dtype="i4") * decoder_start_token_id >>> outputs = model.decode(decoder_input_ids, encoder_outputs) >>> logits = outputs.logits ```""" output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.return_dict encoder_hidden_states = encoder_outputs[0] if encoder_attention_mask is None: batch_size, sequence_length = encoder_hidden_states.shape[:2] encoder_attention_mask = jnp.ones((batch_size, sequence_length)) batch_size, sequence_length = decoder_input_ids.shape if decoder_attention_mask is None: decoder_attention_mask = jnp.ones((batch_size, sequence_length)) if decoder_position_ids is None: if past_key_values is not None: raise ValueError("Make sure to provide `decoder_position_ids` when passing `past_key_values`.") decoder_position_ids = jnp.broadcast_to( jnp.arange(sequence_length)[None, :], (batch_size, sequence_length) ) # Handle any PRNG if needed rngs = {} if dropout_rng is not None: rngs["dropout"] = dropout_rng inputs = {"params": params or self.params} # if past_key_values are passed then cache is already initialized a private flag init_cache has to be # passed down to ensure cache is used. 
It has to be made sure that cache is marked as mutable so that # it can be changed by FlaxBlenderbotSmallAttention module if past_key_values: inputs["cache"] = past_key_values mutable = ["cache"] else: mutable = False def _decoder_forward(module, decoder_input_ids, decoder_attention_mask, decoder_position_ids, **kwargs): decoder_module = module._get_decoder_module() outputs = decoder_module( decoder_input_ids, decoder_attention_mask, decoder_position_ids, **kwargs, ) hidden_states = outputs[0] if self.config.tie_word_embeddings: shared_embedding = module.model.variables["params"]["shared"]["embedding"] lm_logits = module.lm_head.apply({"params": {"kernel": shared_embedding.T}}, hidden_states) else: lm_logits = module.lm_head(hidden_states) lm_logits += module.final_logits_bias.astype(self.dtype) return lm_logits, outputs outputs = self.module.apply( inputs, decoder_input_ids=jnp.array(decoder_input_ids, dtype="i4"), decoder_attention_mask=jnp.array(decoder_attention_mask, dtype="i4"), decoder_position_ids=jnp.array(decoder_position_ids, dtype="i4"), encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=jnp.array(encoder_attention_mask, dtype="i4"), output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, deterministic=deterministic, rngs=rngs, mutable=mutable, method=_decoder_forward, ) if past_key_values is None: lm_logits, decoder_outputs = outputs else: (lm_logits, decoder_outputs), past = outputs if return_dict: outputs = FlaxCausalLMOutputWithCrossAttentions( logits=lm_logits, hidden_states=decoder_outputs.hidden_states, attentions=decoder_outputs.attentions, cross_attentions=decoder_outputs.cross_attentions, ) else: outputs = (lm_logits,) + decoder_outputs[1:] # add updated cache to model output if past_key_values is not None and return_dict: outputs["past_key_values"] = unfreeze(past["cache"]) return outputs elif past_key_values is not None and not return_dict: outputs = outputs[:1] + 
(unfreeze(past["cache"]),) + outputs[1:] return outputs def prepare_inputs_for_generation( self, decoder_input_ids, max_length, attention_mask: Optional[jnp.DeviceArray] = None, decoder_attention_mask: Optional[jnp.DeviceArray] = None, encoder_outputs=None, **kwargs, ): # initializing the cache batch_size, seq_length = decoder_input_ids.shape past_key_values = self.init_cache(batch_size, max_length, encoder_outputs) # Note that usually one would have to put 0's in the attention_mask for x > input_ids.shape[-1] and x < cache_length. # But since the decoder uses a causal mask, those positions are masked anyways. # Thus we can create a single static attention_mask here, which is more efficient for compilation extended_attention_mask = jnp.ones((batch_size, max_length), dtype="i4") if decoder_attention_mask is not None: position_ids = decoder_attention_mask.cumsum(axis=-1) - 1 extended_attention_mask = lax.dynamic_update_slice(extended_attention_mask, decoder_attention_mask, (0, 0)) else: position_ids = jnp.broadcast_to(jnp.arange(seq_length, dtype="i4")[None, :], (batch_size, seq_length)) return { "past_key_values": past_key_values, "encoder_outputs": encoder_outputs, "encoder_attention_mask": attention_mask, "decoder_attention_mask": extended_attention_mask, "decoder_position_ids": position_ids, } def update_inputs_for_generation(self, model_outputs, model_kwargs): model_kwargs["past_key_values"] = model_outputs.past_key_values model_kwargs["decoder_position_ids"] = model_kwargs["decoder_position_ids"][:, -1:] + 1 return model_kwargs FLAX_BLENDERBOT_SMALL_CONDITIONAL_GENERATION_DOCSTRING = """ Returns: Summarization example: ```py >>> from transformers import AutoTokenizer, FlaxBlenderbotSmallForConditionalGeneration >>> model = FlaxBlenderbotSmallForConditionalGeneration.from_pretrained("facebook/blenderbot_small-90M") >>> tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot_small-90M") >>> ARTICLE_TO_SUMMARIZE = "My friends are cool but they eat too many 
carbs." >>> inputs = tokenizer([ARTICLE_TO_SUMMARIZE], max_length=1024, return_tensors="np") >>> # Generate Summary >>> summary_ids = model.generate(inputs["input_ids"]).sequences >>> print(tokenizer.batch_decode(summary_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)) ``` Mask filling example: ```py >>> from transformers import AutoTokenizer, FlaxBlenderbotSmallForConditionalGeneration >>> tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot_small-90M") >>> TXT = "My friends are <mask> but they eat too many carbs." >>> model = FlaxBlenderbotSmallForConditionalGeneration.from_pretrained("facebook/blenderbot_small-90M") >>> input_ids = tokenizer([TXT], return_tensors="np")["input_ids"] >>> logits = model(input_ids).logits >>> masked_index = (input_ids[0] == tokenizer.mask_token_id).nonzero().item() >>> probs = jax.nn.softmax(logits[0, masked_index], axis=0) >>> values, predictions = jax.lax.top_k(probs) >>> tokenizer.decode(predictions).split() ``` """ overwrite_call_docstring( FlaxBlenderbotSmallForConditionalGeneration, BLENDERBOT_SMALL_INPUTS_DOCSTRING + FLAX_BLENDERBOT_SMALL_CONDITIONAL_GENERATION_DOCSTRING, ) append_replace_return_docstrings( FlaxBlenderbotSmallForConditionalGeneration, output_type=FlaxSeq2SeqLMOutput, config_class=_CONFIG_FOR_DOC )
27182812/ChatGLM-LLaMA-chinese-insturct
75,759
src/transformers/models/blenderbot_small/modeling_blenderbot_small.py
# coding=utf-8 # Copyright 2021 The Facebook, Inc. and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch BlenderbotSmall model.""" import copy import math import random from typing import List, Optional, Tuple, Union import torch import torch.utils.checkpoint from torch import nn from torch.nn import CrossEntropyLoss from ...activations import ACT2FN from ...modeling_outputs import ( BaseModelOutput, BaseModelOutputWithPastAndCrossAttentions, CausalLMOutputWithCrossAttentions, Seq2SeqLMOutput, Seq2SeqModelOutput, ) from ...modeling_utils import PreTrainedModel from ...utils import ( add_end_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_blenderbot_small import BlenderbotSmallConfig logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = "BlenderbotSmallConfig" BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST = [ "facebook/blenderbot_small-90M", # See all BlenderbotSmall models at https://huggingface.co/models?filter=blenderbot_small ] # Copied from transformers.models.bart.modeling_bart.shift_tokens_right def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int): """ Shift input ids one token to the right. 
""" shifted_input_ids = input_ids.new_zeros(input_ids.shape) shifted_input_ids[:, 1:] = input_ids[:, :-1].clone() shifted_input_ids[:, 0] = decoder_start_token_id if pad_token_id is None: raise ValueError("self.model.config.pad_token_id has to be defined.") # replace possible -100 values in labels by `pad_token_id` shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id) return shifted_input_ids # Copied from transformers.models.bart.modeling_bart._make_causal_mask def _make_causal_mask(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0): """ Make causal mask used for bi-directional self-attention. """ bsz, tgt_len = input_ids_shape mask = torch.full((tgt_len, tgt_len), torch.tensor(torch.finfo(dtype).min)) mask_cond = torch.arange(mask.size(-1)) mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0) mask = mask.to(dtype) if past_key_values_length > 0: mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], dim=-1) return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length) # Copied from transformers.models.bart.modeling_bart._expand_mask def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None): """ Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. """ bsz, src_len = mask.size() tgt_len = tgt_len if tgt_len is not None else src_len expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype) inverted_mask = 1.0 - expanded_mask return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min) # Copied from transformers.models.blenderbot.modeling_blenderbot.BlenderbotLearnedPositionalEmbedding with Blenderbot->BlenderbotSmall class BlenderbotSmallLearnedPositionalEmbedding(nn.Embedding): """ This module learns positional embeddings up to a fixed maximum size. 
""" def __init__(self, num_embeddings: int, embedding_dim: int): super().__init__(num_embeddings, embedding_dim) def forward(self, input_ids_shape: torch.Size, past_key_values_length: int = 0): """`input_ids_shape` is expected to be [bsz x seqlen].""" bsz, seq_len = input_ids_shape[:2] positions = torch.arange( past_key_values_length, past_key_values_length + seq_len, dtype=torch.long, device=self.weight.device ) return super().forward(positions) # Copied from transformers.models.bart.modeling_bart.BartAttention with Bart->BlenderbotSmall class BlenderbotSmallAttention(nn.Module): """Multi-headed attention from 'Attention Is All You Need' paper""" def __init__( self, embed_dim: int, num_heads: int, dropout: float = 0.0, is_decoder: bool = False, bias: bool = True, ): super().__init__() self.embed_dim = embed_dim self.num_heads = num_heads self.dropout = dropout self.head_dim = embed_dim // num_heads if (self.head_dim * num_heads) != self.embed_dim: raise ValueError( f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}" f" and `num_heads`: {num_heads})." 
) self.scaling = self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias) def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous() def forward( self, hidden_states: torch.Tensor, key_value_states: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: """Input shape: Batch x Time x Channel""" # if key_value_states are provided this layer is used as a cross-attention layer # for the decoder is_cross_attention = key_value_states is not None bsz, tgt_len, _ = hidden_states.size() # get query proj query_states = self.q_proj(hidden_states) * self.scaling # get key, value proj # `past_key_value[0].shape[2] == key_value_states.shape[1]` # is checking that the `sequence_length` of the `past_key_value` is the same as # the provided `key_value_states` to support prefix tuning if ( is_cross_attention and past_key_value is not None and past_key_value[0].shape[2] == key_value_states.shape[1] ): # reuse k,v, cross_attentions key_states = past_key_value[0] value_states = past_key_value[1] elif is_cross_attention: # cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1, bsz) value_states = self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value is not None: # reuse k, v, self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) key_states = torch.cat([past_key_value[0], key_states], dim=2) value_states = 
torch.cat([past_key_value[1], value_states], dim=2) else: # self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) if self.is_decoder: # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states. # Further calls to cross_attention layer can then reuse all cross-attention # key/value_states (first "if" case) # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of # all previous decoder key/value_states. Further calls to uni-directional self-attention # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) # if encoder bi-directional self-attention `past_key_value` is always `None` past_key_value = (key_states, value_states) proj_shape = (bsz * self.num_heads, -1, self.head_dim) query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape) key_states = key_states.reshape(*proj_shape) value_states = value_states.reshape(*proj_shape) src_len = key_states.size(1) attn_weights = torch.bmm(query_states, key_states.transpose(1, 2)) if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len): raise ValueError( f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is" f" {attn_weights.size()}" ) if attention_mask is not None: if attention_mask.size() != (bsz, 1, tgt_len, src_len): raise ValueError( f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}" ) attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) attn_weights = nn.functional.softmax(attn_weights, dim=-1) if layer_head_mask is not None: if layer_head_mask.size() != (self.num_heads,): raise ValueError( f"Head mask for a single layer should be of size {(self.num_heads,)}, but is" f" {layer_head_mask.size()}" ) 
attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len) attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) if output_attentions: # this operation is a bit awkward, but it's required to # make sure that attn_weights keeps its gradient. # In order to do so, attn_weights have to be reshaped # twice and have to be reused in the following attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len) else: attn_weights_reshaped = None attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training) attn_output = torch.bmm(attn_probs, value_states) if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim): raise ValueError( f"`attn_output` should be of size {(bsz * self.num_heads, tgt_len, self.head_dim)}, but is" f" {attn_output.size()}" ) attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim) attn_output = attn_output.transpose(1, 2) # Use the `embed_dim` from the config (stored in the class) rather than `hidden_state` because `attn_output` can be # partitioned across GPUs when using tensor-parallelism. 
attn_output = attn_output.reshape(bsz, tgt_len, self.embed_dim) attn_output = self.out_proj(attn_output) return attn_output, attn_weights_reshaped, past_key_value # Copied from transformers.models.bart.modeling_bart.BartEncoderLayer with Bart->BlenderbotSmall class BlenderbotSmallEncoderLayer(nn.Module): def __init__(self, config: BlenderbotSmallConfig): super().__init__() self.embed_dim = config.d_model self.self_attn = BlenderbotSmallAttention( embed_dim=self.embed_dim, num_heads=config.encoder_attention_heads, dropout=config.attention_dropout, ) self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim) self.dropout = config.dropout self.activation_fn = ACT2FN[config.activation_function] self.activation_dropout = config.activation_dropout self.fc1 = nn.Linear(self.embed_dim, config.encoder_ffn_dim) self.fc2 = nn.Linear(config.encoder_ffn_dim, self.embed_dim) self.final_layer_norm = nn.LayerNorm(self.embed_dim) def forward( self, hidden_states: torch.FloatTensor, attention_mask: torch.FloatTensor, layer_head_mask: torch.FloatTensor, output_attentions: Optional[bool] = False, ) -> Tuple[torch.FloatTensor, Optional[torch.FloatTensor]]: """ Args: hidden_states (`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (`torch.FloatTensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (`torch.FloatTensor`): mask for attention heads in a given layer of size `(encoder_attention_heads,)`. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. 
""" residual = hidden_states hidden_states, attn_weights, _ = self.self_attn( hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask, output_attentions=output_attentions, ) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) residual = hidden_states hidden_states = self.activation_fn(self.fc1(hidden_states)) hidden_states = nn.functional.dropout(hidden_states, p=self.activation_dropout, training=self.training) hidden_states = self.fc2(hidden_states) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states hidden_states = self.final_layer_norm(hidden_states) if hidden_states.dtype == torch.float16 and ( torch.isinf(hidden_states).any() or torch.isnan(hidden_states).any() ): clamp_value = torch.finfo(hidden_states.dtype).max - 1000 hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value) outputs = (hidden_states,) if output_attentions: outputs += (attn_weights,) return outputs # Copied from transformers.models.bart.modeling_bart.BartDecoderLayer with Bart->BlenderbotSmall class BlenderbotSmallDecoderLayer(nn.Module): def __init__(self, config: BlenderbotSmallConfig): super().__init__() self.embed_dim = config.d_model self.self_attn = BlenderbotSmallAttention( embed_dim=self.embed_dim, num_heads=config.decoder_attention_heads, dropout=config.attention_dropout, is_decoder=True, ) self.dropout = config.dropout self.activation_fn = ACT2FN[config.activation_function] self.activation_dropout = config.activation_dropout self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim) self.encoder_attn = BlenderbotSmallAttention( self.embed_dim, config.decoder_attention_heads, dropout=config.attention_dropout, is_decoder=True, ) self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim) self.fc1 = 
nn.Linear(self.embed_dim, config.decoder_ffn_dim) self.fc2 = nn.Linear(config.decoder_ffn_dim, self.embed_dim) self.final_layer_norm = nn.LayerNorm(self.embed_dim) def forward( self, hidden_states: torch.Tensor, attention_mask: Optional[torch.Tensor] = None, encoder_hidden_states: Optional[torch.Tensor] = None, encoder_attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, cross_attn_layer_head_mask: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, output_attentions: Optional[bool] = False, use_cache: Optional[bool] = True, ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]]: """ Args: hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` attention_mask (`torch.FloatTensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. encoder_hidden_states (`torch.FloatTensor`): cross attention input to the layer of shape `(batch, seq_len, embed_dim)` encoder_attention_mask (`torch.FloatTensor`): encoder attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (`torch.FloatTensor`): mask for attention heads in a given layer of size `(encoder_attention_heads,)`. cross_attn_layer_head_mask (`torch.FloatTensor`): mask for cross-attention heads in a given layer of size `(decoder_attention_heads,)`. past_key_value (`Tuple(torch.FloatTensor)`): cached past key and value projection states output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. 
""" residual = hidden_states # Self Attention # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None # add present self-attn cache to positions 1,2 of present_key_value tuple hidden_states, self_attn_weights, present_key_value = self.self_attn( hidden_states=hidden_states, past_key_value=self_attn_past_key_value, attention_mask=attention_mask, layer_head_mask=layer_head_mask, output_attentions=output_attentions, ) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) # Cross-Attention Block cross_attn_present_key_value = None cross_attn_weights = None if encoder_hidden_states is not None: residual = hidden_states # cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn( hidden_states=hidden_states, key_value_states=encoder_hidden_states, attention_mask=encoder_attention_mask, layer_head_mask=cross_attn_layer_head_mask, past_key_value=cross_attn_past_key_value, output_attentions=output_attentions, ) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states hidden_states = self.encoder_attn_layer_norm(hidden_states) # add cross-attn to positions 3,4 of present_key_value tuple present_key_value = present_key_value + cross_attn_present_key_value # Fully Connected residual = hidden_states hidden_states = self.activation_fn(self.fc1(hidden_states)) hidden_states = nn.functional.dropout(hidden_states, p=self.activation_dropout, training=self.training) hidden_states = self.fc2(hidden_states) hidden_states = nn.functional.dropout(hidden_states, 
p=self.dropout, training=self.training) hidden_states = residual + hidden_states hidden_states = self.final_layer_norm(hidden_states) outputs = (hidden_states,) if output_attentions: outputs += (self_attn_weights, cross_attn_weights) if use_cache: outputs += (present_key_value,) return outputs class BlenderbotSmallPreTrainedModel(PreTrainedModel): config_class = BlenderbotSmallConfig base_model_prefix = "model" supports_gradient_checkpointing = True def _init_weights(self, module): std = self.config.init_std if isinstance(module, nn.Linear): module.weight.data.normal_(mean=0.0, std=std) if module.bias is not None: module.bias.data.zero_() elif isinstance(module, nn.Embedding): module.weight.data.normal_(mean=0.0, std=std) if module.padding_idx is not None: module.weight.data[module.padding_idx].zero_() def _set_gradient_checkpointing(self, module, value=False): if isinstance(module, (BlenderbotSmallDecoder, BlenderbotSmallEncoder)): module.gradient_checkpointing = value @property def dummy_inputs(self): pad_token = self.config.pad_token_id input_ids = torch.tensor([[0, 6, 10, 4, 2], [0, 8, 12, 2, pad_token]], device=self.device) dummy_inputs = { "attention_mask": input_ids.ne(pad_token), "input_ids": input_ids, "decoder_input_ids": input_ids, } return dummy_inputs BLENDERBOT_SMALL_START_DOCSTRING = r""" This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`BlenderbotSmallConfig`]): Model configuration class with all the parameters of the model. 
Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ BLENDERBOT_SMALL_GENERATION_EXAMPLE = r""" Conversation example: ```python >>> from transformers import AutoTokenizer, BlenderbotSmallForConditionalGeneration >>> mname = "facebook/blenderbot_small-90M" >>> model = BlenderbotSmallForConditionalGeneration.from_pretrained(mname) >>> tokenizer = AutoTokenizer.from_pretrained(mname) >>> UTTERANCE = "My friends are cool but they eat too many carbs." >>> print("Human: ", UTTERANCE) Human: My friends are cool but they eat too many carbs. >>> inputs = tokenizer([UTTERANCE], return_tensors="pt") >>> reply_ids = model.generate(**inputs) >>> print("Bot: ", tokenizer.batch_decode(reply_ids, skip_special_tokens=True)[0]) Bot: what kind of carbs do they eat? i don't know much about carbs. >>> REPLY = "I'm not sure" >>> print("Human: ", REPLY) Human: I'm not sure >>> NEXT_UTTERANCE = ( ... "My friends are cool but they eat too many carbs.</s> <s>what kind of carbs do they eat? " ... "i don't know much about carbs</s> " ... "<s> I'm not sure." ... ) >>> inputs = tokenizer([NEXT_UTTERANCE], return_tensors="pt") >>> next_reply_ids = model.generate(**inputs) >>> print("Bot: ", tokenizer.batch_decode(next_reply_ids, skip_special_tokens=True)[0]) Bot: they eat a lot of carbs. carbs are high in fat, protein, and carbohydrates. ``` """ BLENDERBOT_SMALL_INPUTS_DOCSTRING = r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. 
[What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) decoder_input_ids (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*): Indices of decoder input sequence tokens in the vocabulary. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are decoder input IDs?](../glossary#decoder-input-ids) BlenderbotSmall uses the `bos_token_id` as the starting token for `decoder_input_ids` generation. If `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see `past_key_values`). decoder_attention_mask (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*): Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also be used by default. head_mask (`torch.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. decoder_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the cross-attention modules in the decoder. 
Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. encoder_outputs (`tuple(tuple(torch.FloatTensor)`, *optional*): Tuple consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*: `attentions`) `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)`, *optional*) is a sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. decoder_inputs_embeds (`torch.FloatTensor` of shape `(batch_size, target_sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `decoder_input_ids` you can choose to directly pass an embedded representation. 
If `past_key_values` is used, optionally only the last `decoder_inputs_embeds` have to be input (see `past_key_values`). This is useful if you want more control over how to convert `decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix. If `decoder_input_ids` and `decoder_inputs_embeds` are both unset, `decoder_inputs_embeds` takes the value of `inputs_embeds`. use_cache (`bool`, *optional*): If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see `past_key_values`). output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ class BlenderbotSmallEncoder(BlenderbotSmallPreTrainedModel): """ Transformer encoder consisting of *config.encoder_layers* self attention layers. Each layer is a [`BlenderbotSmallEncoderLayer`]. 
Args: config: BlenderbotSmallConfig embed_tokens (nn.Embedding): output embedding """ def __init__(self, config: BlenderbotSmallConfig, embed_tokens: Optional[nn.Embedding] = None): super().__init__(config) self.dropout = config.dropout self.layerdrop = config.encoder_layerdrop embed_dim = config.d_model self.padding_idx = config.pad_token_id self.max_source_positions = config.max_position_embeddings self.embed_scale = math.sqrt(embed_dim) if config.scale_embedding else 1.0 if embed_tokens is not None: self.embed_tokens = embed_tokens else: self.embed_tokens = nn.Embedding(config.vocab_size, embed_dim, self.padding_idx) self.embed_positions = BlenderbotSmallLearnedPositionalEmbedding( config.max_position_embeddings, embed_dim, ) self.layers = nn.ModuleList([BlenderbotSmallEncoderLayer(config) for _ in range(config.encoder_layers)]) self.layernorm_embedding = nn.LayerNorm(embed_dim) self.gradient_checkpointing = False # Initialize weights and apply final processing self.post_init() def forward( self, input_ids=None, attention_mask=None, head_mask=None, inputs_embeds=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) head_mask (`torch.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules. 
Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict # retrieve input_ids and inputs_embeds if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") elif input_ids is not None: input_shape = input_ids.size() input_ids = input_ids.view(-1, input_shape[-1]) elif inputs_embeds is not None: input_shape = inputs_embeds.size()[:-1] else: raise ValueError("You have to specify either input_ids or inputs_embeds") if inputs_embeds is None: inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale embed_pos = self.embed_positions(input_shape) hidden_states = inputs_embeds + embed_pos hidden_states = self.layernorm_embedding(hidden_states) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, 
training=self.training) # expand attention_mask if attention_mask is not None: # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype) encoder_states = () if output_hidden_states else None all_attentions = () if output_attentions else None # check if head_mask has a correct number of layers specified if desired if head_mask is not None: if head_mask.size()[0] != len(self.layers): raise ValueError( f"The head_mask should be specified for {len(self.layers)} layers, but it is for" f" {head_mask.size()[0]}." ) for idx, encoder_layer in enumerate(self.layers): if output_hidden_states: encoder_states = encoder_states + (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability = random.uniform(0, 1) if self.training and (dropout_probability < self.layerdrop): # skip the layer layer_outputs = (None, None) else: if self.gradient_checkpointing and self.training: def create_custom_forward(module): def custom_forward(*inputs): return module(*inputs, output_attentions) return custom_forward layer_outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(encoder_layer), hidden_states, attention_mask, (head_mask[idx] if head_mask is not None else None), ) else: layer_outputs = encoder_layer( hidden_states, attention_mask, layer_head_mask=(head_mask[idx] if head_mask is not None else None), output_attentions=output_attentions, ) hidden_states = layer_outputs[0] if output_attentions: all_attentions = all_attentions + (layer_outputs[1],) if output_hidden_states: encoder_states = encoder_states + (hidden_states,) if not return_dict: return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None) return BaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions ) class BlenderbotSmallDecoder(BlenderbotSmallPreTrainedModel): """ Transformer decoder consisting of *config.decoder_layers* 
layers. Each layer is a [`BlenderbotSmallDecoderLayer`] Args: config: BlenderbotSmallConfig embed_tokens (nn.Embedding): output embedding """ def __init__(self, config: BlenderbotSmallConfig, embed_tokens: Optional[nn.Embedding] = None): super().__init__(config) self.dropout = config.dropout self.layerdrop = config.decoder_layerdrop self.padding_idx = config.pad_token_id self.max_target_positions = config.max_position_embeddings self.embed_scale = math.sqrt(config.d_model) if config.scale_embedding else 1.0 if embed_tokens is not None: self.embed_tokens = embed_tokens else: self.embed_tokens = nn.Embedding(config.vocab_size, config.d_model, self.padding_idx) self.embed_positions = BlenderbotSmallLearnedPositionalEmbedding( config.max_position_embeddings, config.d_model, ) self.layers = nn.ModuleList([BlenderbotSmallDecoderLayer(config) for _ in range(config.decoder_layers)]) self.layernorm_embedding = nn.LayerNorm(config.d_model) self.gradient_checkpointing = False # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.embed_tokens def set_input_embeddings(self, value): self.embed_tokens = value # Copied from transformers.models.bart.modeling_bart.BartDecoder._prepare_decoder_attention_mask def _prepare_decoder_attention_mask(self, attention_mask, input_shape, inputs_embeds, past_key_values_length): # create causal mask # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] combined_attention_mask = None if input_shape[-1] > 1: combined_attention_mask = _make_causal_mask( input_shape, inputs_embeds.dtype, past_key_values_length=past_key_values_length ).to(inputs_embeds.device) if attention_mask is not None: # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] expanded_attn_mask = _expand_mask(attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1]).to( inputs_embeds.device ) combined_attention_mask = ( expanded_attn_mask if combined_attention_mask is None else expanded_attn_mask + combined_attention_mask 
) return combined_attention_mask def forward( self, input_ids=None, attention_mask=None, encoder_hidden_states=None, encoder_attention_mask=None, head_mask=None, cross_attn_head_mask=None, past_key_values=None, inputs_embeds=None, use_cache=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, encoder_sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. encoder_attention_mask (`torch.LongTensor` of shape `(batch_size, encoder_sequence_length)`, *optional*): Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. 
cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the cross-attention modules in the decoder to avoid performing cross-attention on hidden heads. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. 
return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) use_cache = use_cache if use_cache is not None else self.config.use_cache return_dict = return_dict if return_dict is not None else self.config.use_return_dict # retrieve input_ids and inputs_embeds if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time") elif input_ids is not None: input_shape = input_ids.size() input_ids = input_ids.view(-1, input_shape[-1]) elif inputs_embeds is not None: input_shape = inputs_embeds.size()[:-1] else: raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds") # past_key_values_length past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 if inputs_embeds is None: inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale attention_mask = self._prepare_decoder_attention_mask( attention_mask, input_shape, inputs_embeds, past_key_values_length ) # expand encoder attention mask if encoder_hidden_states is not None and encoder_attention_mask is not None: # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] encoder_attention_mask = _expand_mask(encoder_attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1]) # embed positions positions = self.embed_positions(input_shape, past_key_values_length) # BlenderbotSmall applies layer norm on hidden_states inputs_embeds = self.layernorm_embedding(inputs_embeds) hidden_states = inputs_embeds + positions hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) # decoder layers all_hidden_states = () if output_hidden_states else None 
all_self_attns = () if output_attentions else None all_cross_attentions = () if (output_attentions and encoder_hidden_states is not None) else None next_decoder_cache = () if use_cache else None # check if head_mask/cross_attn_head_mask has a correct number of layers specified if desired for attn_mask, mask_name in zip([head_mask, cross_attn_head_mask], ["head_mask", "cross_attn_head_mask"]): if attn_mask is not None: if attn_mask.size()[0] != len(self.layers): raise ValueError( f"The `{mask_name}` should be specified for {len(self.layers)} layers, but it is for" f" {head_mask.size()[0]}." ) for idx, decoder_layer in enumerate(self.layers): # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) if output_hidden_states: all_hidden_states += (hidden_states,) dropout_probability = random.uniform(0, 1) if self.training and (dropout_probability < self.layerdrop): continue past_key_value = past_key_values[idx] if past_key_values is not None else None if self.gradient_checkpointing and self.training: if use_cache: logger.warning_once( "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
) use_cache = False def create_custom_forward(module): def custom_forward(*inputs): # None for past_key_value return module(*inputs, output_attentions, use_cache) return custom_forward layer_outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(decoder_layer), hidden_states, attention_mask, encoder_hidden_states, encoder_attention_mask, head_mask[idx] if head_mask is not None else None, cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None, None, ) else: layer_outputs = decoder_layer( hidden_states, attention_mask=attention_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_attention_mask, layer_head_mask=(head_mask[idx] if head_mask is not None else None), cross_attn_layer_head_mask=( cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None ), past_key_value=past_key_value, output_attentions=output_attentions, use_cache=use_cache, ) hidden_states = layer_outputs[0] if use_cache: next_decoder_cache += (layer_outputs[3 if output_attentions else 1],) if output_attentions: all_self_attns += (layer_outputs[1],) if encoder_hidden_states is not None: all_cross_attentions += (layer_outputs[2],) # add hidden states from the last decoder layer if output_hidden_states: all_hidden_states += (hidden_states,) next_cache = next_decoder_cache if use_cache else None if not return_dict: return tuple( v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_cross_attentions] if v is not None ) return BaseModelOutputWithPastAndCrossAttentions( last_hidden_state=hidden_states, past_key_values=next_cache, hidden_states=all_hidden_states, attentions=all_self_attns, cross_attentions=all_cross_attentions, ) @add_start_docstrings( "The bare BlenderbotSmall Model outputting raw hidden-states without any specific head on top.", BLENDERBOT_SMALL_START_DOCSTRING, ) class BlenderbotSmallModel(BlenderbotSmallPreTrainedModel): _keys_to_ignore_on_load_missing = ["encoder.embed_tokens.weight", 
"decoder.embed_tokens.weight"] def __init__(self, config: BlenderbotSmallConfig): super().__init__(config) padding_idx, vocab_size = config.pad_token_id, config.vocab_size self.shared = nn.Embedding(vocab_size, config.d_model, padding_idx) self.encoder = BlenderbotSmallEncoder(config, self.shared) self.decoder = BlenderbotSmallDecoder(config, self.shared) # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.shared def set_input_embeddings(self, value): self.shared = value self.encoder.embed_tokens = self.shared self.decoder.embed_tokens = self.shared def get_encoder(self): return self.encoder def get_decoder(self): return self.decoder @add_start_docstrings_to_model_forward(BLENDERBOT_SMALL_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=Seq2SeqModelOutput, config_class=_CONFIG_FOR_DOC) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.Tensor] = None, decoder_input_ids: Optional[torch.LongTensor] = None, decoder_attention_mask: Optional[torch.LongTensor] = None, head_mask: Optional[torch.Tensor] = None, decoder_head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, encoder_outputs: Optional[Union[Tuple, BaseModelOutput]] = None, past_key_values: Optional[List[torch.FloatTensor]] = None, inputs_embeds: Optional[torch.Tensor] = None, decoder_inputs_embeds: Optional[torch.FloatTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple[torch.FloatTensor], Seq2SeqModelOutput]: r""" Returns: Example: ```python >>> from transformers import AutoTokenizer, BlenderbotSmallModel >>> model = BlenderbotSmallModel.from_pretrained("facebook/blenderbot_small-90M") >>> tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot_small-90M") >>> inputs = tokenizer("Studies have been shown that owning a 
dog is good for you", return_tensors="pt") >>> decoder_inputs = tokenizer("Studies show that", return_tensors="pt") # Batch size 1 >>> outputs = model(input_ids=inputs.input_ids, decoder_input_ids=decoder_inputs.input_ids) >>> last_hidden_states = outputs.last_hidden_state >>> list(last_hidden_states.shape) [1, 3, 512] ```""" output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) use_cache = use_cache if use_cache is not None else self.config.use_cache return_dict = return_dict if return_dict is not None else self.config.use_return_dict if encoder_outputs is None: encoder_outputs = self.encoder( input_ids=input_ids, attention_mask=attention_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) # If the user passed a tuple for encoder_outputs, we wrap it in a BaseModelOutput when return_dict=True elif return_dict and not isinstance(encoder_outputs, BaseModelOutput): encoder_outputs = BaseModelOutput( last_hidden_state=encoder_outputs[0], hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None, attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None, ) # decoder outputs consists of (dec_features, past_key_value, dec_hidden, dec_attn) decoder_outputs = self.decoder( input_ids=decoder_input_ids, attention_mask=decoder_attention_mask, encoder_hidden_states=encoder_outputs[0], encoder_attention_mask=attention_mask, head_mask=decoder_head_mask, cross_attn_head_mask=cross_attn_head_mask, past_key_values=past_key_values, inputs_embeds=decoder_inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) if not return_dict: return decoder_outputs + encoder_outputs return Seq2SeqModelOutput( 
last_hidden_state=decoder_outputs.last_hidden_state, past_key_values=decoder_outputs.past_key_values, decoder_hidden_states=decoder_outputs.hidden_states, decoder_attentions=decoder_outputs.attentions, cross_attentions=decoder_outputs.cross_attentions, encoder_last_hidden_state=encoder_outputs.last_hidden_state, encoder_hidden_states=encoder_outputs.hidden_states, encoder_attentions=encoder_outputs.attentions, ) @add_start_docstrings( "The BlenderbotSmall Model with a language modeling head. Can be used for summarization.", BLENDERBOT_SMALL_START_DOCSTRING, ) class BlenderbotSmallForConditionalGeneration(BlenderbotSmallPreTrainedModel): base_model_prefix = "model" _keys_to_ignore_on_load_missing = [ r"final_logits_bias", r"encoder.version", r"decoder.version", r"lm_head.weight", "encoder.embed_tokens.weight", "decoder.embed_tokens.weight", ] def __init__(self, config: BlenderbotSmallConfig): super().__init__(config) self.model = BlenderbotSmallModel(config) self.register_buffer("final_logits_bias", torch.zeros((1, self.model.shared.num_embeddings))) self.lm_head = nn.Linear(config.d_model, self.model.shared.num_embeddings, bias=False) # Initialize weights and apply final processing self.post_init() def get_encoder(self): return self.model.get_encoder() def get_decoder(self): return self.model.get_decoder() def resize_token_embeddings(self, new_num_tokens: int) -> nn.Embedding: new_embeddings = super().resize_token_embeddings(new_num_tokens) self._resize_final_logits_bias(new_num_tokens) return new_embeddings def _resize_final_logits_bias(self, new_num_tokens: int) -> None: old_num_tokens = self.final_logits_bias.shape[-1] if new_num_tokens <= old_num_tokens: new_bias = self.final_logits_bias[:, :new_num_tokens] else: extra_bias = torch.zeros((1, new_num_tokens - old_num_tokens), device=self.final_logits_bias.device) new_bias = torch.cat([self.final_logits_bias, extra_bias], dim=1) self.register_buffer("final_logits_bias", new_bias) def get_output_embeddings(self): 
return self.lm_head def set_output_embeddings(self, new_embeddings): self.lm_head = new_embeddings @add_start_docstrings_to_model_forward(BLENDERBOT_SMALL_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=Seq2SeqLMOutput, config_class=_CONFIG_FOR_DOC) @add_end_docstrings(BLENDERBOT_SMALL_GENERATION_EXAMPLE) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.Tensor] = None, decoder_input_ids: Optional[torch.LongTensor] = None, decoder_attention_mask: Optional[torch.LongTensor] = None, head_mask: Optional[torch.Tensor] = None, decoder_head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, encoder_outputs: Optional[Union[Tuple, BaseModelOutput]] = None, past_key_values: Optional[List[torch.FloatTensor]] = None, inputs_embeds: Optional[torch.Tensor] = None, decoder_inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple[torch.FloatTensor], Seq2SeqLMOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. 
Returns: """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict if labels is not None: if use_cache: logger.warning("The `use_cache` argument is changed to `False` since `labels` is provided.") use_cache = False if decoder_input_ids is None and decoder_inputs_embeds is None: decoder_input_ids = shift_tokens_right( labels, self.config.pad_token_id, self.config.decoder_start_token_id ) outputs = self.model( input_ids, attention_mask=attention_mask, decoder_input_ids=decoder_input_ids, encoder_outputs=encoder_outputs, decoder_attention_mask=decoder_attention_mask, head_mask=head_mask, decoder_head_mask=decoder_head_mask, cross_attn_head_mask=cross_attn_head_mask, past_key_values=past_key_values, inputs_embeds=inputs_embeds, decoder_inputs_embeds=decoder_inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) lm_logits = self.lm_head(outputs[0]) + self.final_logits_bias masked_lm_loss = None if labels is not None: loss_fct = CrossEntropyLoss() masked_lm_loss = loss_fct(lm_logits.view(-1, self.config.vocab_size), labels.view(-1)) if not return_dict: output = (lm_logits,) + outputs[1:] return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output return Seq2SeqLMOutput( loss=masked_lm_loss, logits=lm_logits, past_key_values=outputs.past_key_values, decoder_hidden_states=outputs.decoder_hidden_states, decoder_attentions=outputs.decoder_attentions, cross_attentions=outputs.cross_attentions, encoder_last_hidden_state=outputs.encoder_last_hidden_state, encoder_hidden_states=outputs.encoder_hidden_states, encoder_attentions=outputs.encoder_attentions, ) def prepare_inputs_for_generation( self, decoder_input_ids, past_key_values=None, attention_mask=None, head_mask=None, decoder_head_mask=None, cross_attn_head_mask=None, use_cache=None, encoder_outputs=None, **kwargs, ): # cut decoder_input_ids if past is used if past_key_values is not None: 
decoder_input_ids = decoder_input_ids[:, -1:] return { "input_ids": None, # encoder_outputs is defined. input_ids not needed "encoder_outputs": encoder_outputs, "past_key_values": past_key_values, "decoder_input_ids": decoder_input_ids, "attention_mask": attention_mask, "head_mask": head_mask, "decoder_head_mask": decoder_head_mask, "cross_attn_head_mask": cross_attn_head_mask, "use_cache": use_cache, # change this to avoid caching (presumably for debugging) } @staticmethod def _reorder_cache(past_key_values, beam_idx): reordered_past = () for layer_past in past_key_values: # cached cross_attention states don't have to be reordered -> they are always the same reordered_past += ( tuple(past_state.index_select(0, beam_idx) for past_state in layer_past[:2]) + layer_past[2:], ) return reordered_past # Copied from transformers.models.bart.modeling_bart.BartDecoderWrapper with Bart->BlenderbotSmall class BlenderbotSmallDecoderWrapper(BlenderbotSmallPreTrainedModel): """ This wrapper class is a helper class to correctly load pretrained checkpoints when the causal language model is used in combination with the [`EncoderDecoderModel`] framework. 
""" def __init__(self, config): super().__init__(config) self.decoder = BlenderbotSmallDecoder(config) def forward(self, *args, **kwargs): return self.decoder(*args, **kwargs) # Copied from transformers.models.bart.modeling_bart.BartForCausalLM with Bart->BlenderbotSmall, facebook/bart-base->facebook/blenderbot_small-90M class BlenderbotSmallForCausalLM(BlenderbotSmallPreTrainedModel): _keys_to_ignore_on_load_missing = ["lm_head.weight"] def __init__(self, config): config = copy.deepcopy(config) config.is_decoder = True config.is_encoder_decoder = False super().__init__(config) self.model = BlenderbotSmallDecoderWrapper(config) self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.model.decoder.embed_tokens def set_input_embeddings(self, value): self.model.decoder.embed_tokens = value def get_output_embeddings(self): return self.lm_head def set_output_embeddings(self, new_embeddings): self.lm_head = new_embeddings def set_decoder(self, decoder): self.model.decoder = decoder def get_decoder(self): return self.model.decoder @replace_return_docstrings(output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC) def forward( self, input_ids: torch.LongTensor = None, attention_mask: Optional[torch.Tensor] = None, encoder_hidden_states: Optional[torch.FloatTensor] = None, encoder_attention_mask: Optional[torch.FloatTensor] = None, head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, past_key_values: Optional[List[torch.FloatTensor]] = None, inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, CausalLMOutputWithCrossAttentions]: r""" Args: input_ids (`torch.LongTensor` of 
shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if the model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`: head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. 
past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. The two additional tensors are only required when the model is used as a decoder in a Sequence to Sequence model. Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. use_cache (`bool`, *optional*): If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see `past_key_values`). - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. 
return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. Returns: Example: ```python >>> from transformers import AutoTokenizer, BlenderbotSmallForCausalLM >>> tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot_small-90M") >>> model = BlenderbotSmallForCausalLM.from_pretrained( ... "facebook/blenderbot_small-90M", add_cross_attention=False ... ) >>> assert model.config.is_decoder, f"{model.__class__} has to be configured as a decoder." >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt") >>> outputs = model(**inputs) >>> logits = outputs.logits >>> expected_shape = [1, inputs.input_ids.shape[-1], model.config.vocab_size] >>> list(logits.shape) == expected_shape True ```""" output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) outputs = self.model.decoder( input_ids=input_ids, attention_mask=attention_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_attention_mask, head_mask=head_mask, cross_attn_head_mask=cross_attn_head_mask, past_key_values=past_key_values, inputs_embeds=inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) logits = self.lm_head(outputs[0]) loss = None if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.config.vocab_size), labels.view(-1)) if not return_dict: output = (logits,) + outputs[1:] return (loss,) + output if loss is not None else output return CausalLMOutputWithCrossAttentions( loss=loss, logits=logits, past_key_values=outputs.past_key_values, 
hidden_states=outputs.hidden_states, attentions=outputs.attentions, cross_attentions=outputs.cross_attentions, ) def prepare_inputs_for_generation( self, input_ids, past_key_values=None, attention_mask=None, use_cache=None, **kwargs ): # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly if attention_mask is None: attention_mask = input_ids.new_ones(input_ids.shape) if past_key_values: input_ids = input_ids[:, -1:] # first step, decoder_cached_states are empty return { "input_ids": input_ids, # encoder_outputs is defined. input_ids not needed "attention_mask": attention_mask, "past_key_values": past_key_values, "use_cache": use_cache, } @staticmethod def _reorder_cache(past_key_values, beam_idx): reordered_past = () for layer_past in past_key_values: reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),) return reordered_past
# --- stray dataset-join metadata (not part of the source file); preserved as comments ---
# repo_id: 27182812/ChatGLM-LLaMA-chinese-insturct
# size: 69,399
# file_path: src/transformers/models/blenderbot_small/modeling_tf_blenderbot_small.py
# coding=utf-8 # Copyright 2021 The Facebook, Inc and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ TF 2.0 BlenderbotSmall model.""" import random from typing import List, Optional, Tuple, Union import numpy as np import tensorflow as tf from ...activations_tf import get_tf_activation from ...modeling_tf_outputs import ( TFBaseModelOutput, TFBaseModelOutputWithPastAndCrossAttentions, TFSeq2SeqLMOutput, TFSeq2SeqModelOutput, ) # Public API from ...modeling_tf_utils import ( DUMMY_INPUTS, TFCausalLanguageModelingLoss, TFPreTrainedModel, keras_serializable, unpack_inputs, ) from ...tf_utils import shape_list, stable_softmax from ...utils import ( ContextManagers, add_code_sample_docstrings, add_end_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_blenderbot_small import BlenderbotSmallConfig logger = logging.get_logger(__name__) _CHECKPOINT_FOR_DOC = "facebook/blenderbot_small-90M" _CONFIG_FOR_DOC = "BlenderbotSmallConfig" LARGE_NEGATIVE = -1e8 # Copied from transformers.models.bart.modeling_tf_bart.shift_tokens_right def shift_tokens_right(input_ids: tf.Tensor, pad_token_id: int, decoder_start_token_id: int): pad_token_id = tf.cast(pad_token_id, input_ids.dtype) decoder_start_token_id = tf.cast(decoder_start_token_id, input_ids.dtype) start_tokens = tf.fill( (shape_list(input_ids)[0], 1), tf.convert_to_tensor(decoder_start_token_id, input_ids.dtype) 
) shifted_input_ids = tf.concat([start_tokens, input_ids[:, :-1]], -1) # replace possible -100 values in labels by `pad_token_id` shifted_input_ids = tf.where( shifted_input_ids == -100, tf.fill(shape_list(shifted_input_ids), tf.convert_to_tensor(pad_token_id, input_ids.dtype)), shifted_input_ids, ) # "Verify that `labels` has only positive values and -100" assert_gte0 = tf.debugging.assert_greater_equal(shifted_input_ids, tf.constant(0, dtype=input_ids.dtype)) # Make sure the assertion op is called by wrapping the result in an identity no-op with tf.control_dependencies([assert_gte0]): shifted_input_ids = tf.identity(shifted_input_ids) return shifted_input_ids # Copied from transformers.models.bart.modeling_tf_bart._make_causal_mask def _make_causal_mask(input_ids_shape: tf.TensorShape, past_key_values_length: int = 0): """ Make causal mask used for bi-directional self-attention. """ bsz = input_ids_shape[0] tgt_len = input_ids_shape[1] mask = tf.ones((tgt_len, tgt_len)) * LARGE_NEGATIVE mask_cond = tf.range(shape_list(mask)[-1]) mask = tf.where(mask_cond < tf.reshape(mask_cond + 1, (shape_list(mask)[-1], 1)), 0.0, mask) if past_key_values_length > 0: mask = tf.concat([tf.zeros((tgt_len, past_key_values_length)), mask], axis=-1) return tf.tile(mask[None, None, :, :], (bsz, 1, 1, 1)) # Copied from transformers.models.bart.modeling_tf_bart._expand_mask def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None): """ Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. 
""" src_len = shape_list(mask)[1] tgt_len = tgt_len if tgt_len is not None else src_len one_cst = tf.constant(1.0) mask = tf.cast(mask, dtype=one_cst.dtype) expanded_mask = tf.tile(mask[:, None, None, :], (1, 1, tgt_len, 1)) return (one_cst - expanded_mask) * LARGE_NEGATIVE # Copied from transformers.models.blenderbot.modeling_tf_blenderbot.TFBlenderbotLearnedPositionalEmbedding with Blenderbot->BlenderbotSmall class TFBlenderbotSmallLearnedPositionalEmbedding(tf.keras.layers.Embedding): """ This module learns positional embeddings up to a fixed maximum size. """ def __init__(self, num_embeddings: int, embedding_dim: int, **kwargs): super().__init__(num_embeddings, embedding_dim, **kwargs) def call( self, input_shape: tf.TensorShape, past_key_values_length: int = 0, position_ids: Optional[tf.Tensor] = None ): """Input is expected to be of size [bsz x seqlen].""" if position_ids is None: seq_len = input_shape[1] position_ids = tf.range(seq_len, delta=1, name="range") position_ids += past_key_values_length return super().call(tf.cast(position_ids, dtype=tf.int32)) # Copied from transformers.models.bart.modeling_tf_bart.TFBartAttention with Bart->BlenderbotSmall class TFBlenderbotSmallAttention(tf.keras.layers.Layer): """Multi-headed attention from "Attention Is All You Need""" def __init__( self, embed_dim: int, num_heads: int, dropout: float = 0.0, is_decoder: bool = False, bias: bool = True, **kwargs, ): super().__init__(**kwargs) self.embed_dim = embed_dim self.num_heads = num_heads self.dropout = tf.keras.layers.Dropout(dropout) self.head_dim = embed_dim // num_heads if (self.head_dim * num_heads) != self.embed_dim: raise ValueError( f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}" f" and `num_heads`: {num_heads})." 
) self.scaling = self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="k_proj") self.q_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="q_proj") self.v_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="v_proj") self.out_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="out_proj") def _shape(self, tensor: tf.Tensor, seq_len: int, bsz: int): return tf.transpose(tf.reshape(tensor, (bsz, seq_len, self.num_heads, self.head_dim)), (0, 2, 1, 3)) def call( self, hidden_states: tf.Tensor, key_value_states: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] = None, training: Optional[bool] = False, ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: """Input shape: Batch x Time x Channel""" # if key_value_states are provided this layer is used as a cross-attention layer # for the decoder is_cross_attention = key_value_states is not None bsz, tgt_len, embed_dim = shape_list(hidden_states) # get query proj query_states = self.q_proj(hidden_states) * self.scaling # get key, value proj if is_cross_attention and past_key_value is not None: # reuse k,v, cross_attentions key_states = past_key_value[0] value_states = past_key_value[1] elif is_cross_attention: # cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1, bsz) value_states = self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value is not None: # reuse k, v, self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) key_states = tf.concat([past_key_value[0], key_states], axis=2) value_states = tf.concat([past_key_value[1], value_states], axis=2) else: # self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) if 
self.is_decoder: # if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states. # Further calls to cross_attention layer can then reuse all cross-attention # key/value_states (first "if" case) # if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of # all previous decoder key/value_states. Further calls to uni-directional self-attention # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) # if encoder bi-directional self-attention `past_key_value` is always `None` past_key_value = (key_states, value_states) proj_shape = (bsz * self.num_heads, -1, self.head_dim) query_states = tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape) key_states = tf.reshape(key_states, proj_shape) value_states = tf.reshape(value_states, proj_shape) src_len = shape_list(key_states)[1] attn_weights = tf.matmul(query_states, key_states, transpose_b=True) tf.debugging.assert_equal( shape_list(attn_weights), [bsz * self.num_heads, tgt_len, src_len], message=( f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is" f" {shape_list(attn_weights)}" ), ) if attention_mask is not None: tf.debugging.assert_equal( shape_list(attention_mask), [bsz, 1, tgt_len, src_len], message=( f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is" f" {shape_list(attention_mask)}" ), ) attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype) attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) + attention_mask attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) attn_weights = stable_softmax(attn_weights, axis=-1) if layer_head_mask is not None: tf.debugging.assert_equal( shape_list(layer_head_mask), [self.num_heads], message=( f"Head mask for a single layer should be of size {(self.num_heads)}, but is" f" {shape_list(layer_head_mask)}" ), ) attn_weights = 
tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape( attn_weights, (bsz, self.num_heads, tgt_len, src_len) ) attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) attn_probs = self.dropout(attn_weights, training=training) attn_output = tf.matmul(attn_probs, value_states) tf.debugging.assert_equal( shape_list(attn_output), [bsz * self.num_heads, tgt_len, self.head_dim], message=( f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is" f" {shape_list(attn_output)}" ), ) attn_output = tf.transpose( tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)), (0, 2, 1, 3) ) attn_output = tf.reshape(attn_output, (bsz, tgt_len, embed_dim)) attn_output = self.out_proj(attn_output) attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) return attn_output, attn_weights, past_key_value # Copied from transformers.models.bart.modeling_tf_bart.TFBartEncoderLayer with Bart->BlenderbotSmall class TFBlenderbotSmallEncoderLayer(tf.keras.layers.Layer): def __init__(self, config: BlenderbotSmallConfig, **kwargs): super().__init__(**kwargs) self.embed_dim = config.d_model self.self_attn = TFBlenderbotSmallAttention( self.embed_dim, config.encoder_attention_heads, dropout=config.attention_dropout, name="self_attn" ) self.self_attn_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="self_attn_layer_norm") self.dropout = tf.keras.layers.Dropout(config.dropout) self.activation_fn = get_tf_activation(config.activation_function) self.activation_dropout = tf.keras.layers.Dropout(config.activation_dropout) self.fc1 = tf.keras.layers.Dense(config.encoder_ffn_dim, name="fc1") self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2") self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm") def call( self, hidden_states: tf.Tensor, attention_mask: Optional[Union[np.ndarray, tf.Tensor]], layer_head_mask: Optional[tf.Tensor], 
training: Optional[bool] = False, ) -> tf.Tensor: """ Args: hidden_states (`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (`tf.Tensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (`tf.Tensor`): mask for attention heads in a given layer of size `(encoder_attention_heads,)` """ residual = hidden_states hidden_states, self_attn_weights, _ = self.self_attn( hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask ) tf.debugging.assert_equal( shape_list(hidden_states), shape_list(residual), message=f"Self attn modified the shape of query {shape_list(residual)} to {shape_list(hidden_states)}", ) hidden_states = self.dropout(hidden_states, training=training) hidden_states = residual + hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) residual = hidden_states hidden_states = self.activation_fn(self.fc1(hidden_states)) hidden_states = self.activation_dropout(hidden_states, training=training) hidden_states = self.fc2(hidden_states) hidden_states = self.dropout(hidden_states, training=training) hidden_states = residual + hidden_states hidden_states = self.final_layer_norm(hidden_states) return hidden_states, self_attn_weights # Copied from transformers.models.bart.modeling_tf_bart.TFBartDecoderLayer with Bart->BlenderbotSmall class TFBlenderbotSmallDecoderLayer(tf.keras.layers.Layer): def __init__(self, config: BlenderbotSmallConfig, **kwargs): super().__init__(**kwargs) self.embed_dim = config.d_model self.self_attn = TFBlenderbotSmallAttention( embed_dim=self.embed_dim, num_heads=config.decoder_attention_heads, dropout=config.attention_dropout, name="self_attn", is_decoder=True, ) self.dropout = tf.keras.layers.Dropout(config.dropout) self.activation_fn = get_tf_activation(config.activation_function) self.activation_dropout = tf.keras.layers.Dropout(config.activation_dropout) 
self.self_attn_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="self_attn_layer_norm") self.encoder_attn = TFBlenderbotSmallAttention( self.embed_dim, config.decoder_attention_heads, dropout=config.attention_dropout, name="encoder_attn", is_decoder=True, ) self.encoder_attn_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="encoder_attn_layer_norm") self.fc1 = tf.keras.layers.Dense(config.decoder_ffn_dim, name="fc1") self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2") self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm") def call( self, hidden_states: tf.Tensor, attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, encoder_hidden_states: Optional[Union[np.ndarray, tf.Tensor]] = None, encoder_attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, layer_head_mask: Optional[tf.Tensor] = None, cross_attn_layer_head_mask: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[Tuple[Union[np.ndarray, tf.Tensor]]]] = None, training: Optional[bool] = False, ) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]: """ Args: hidden_states (`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (`tf.Tensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. encoder_hidden_states (`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` encoder_attention_mask (`tf.Tensor`): encoder attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (`tf.Tensor`): mask for attention heads in a given layer of size `(decoder_attention_heads,)` cross_attn_layer_head_mask (`tf.Tensor`): mask for heads of the cross-attention module. 
`(decoder_attention_heads,)` past_key_value (`Tuple(tf.Tensor)`): cached past key and value projection states """ residual = hidden_states # Self Attention # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None # add present self-attn cache to positions 1,2 of present_key_value tuple hidden_states, self_attn_weights, present_key_value = self.self_attn( hidden_states=hidden_states, past_key_value=self_attn_past_key_value, attention_mask=attention_mask, layer_head_mask=layer_head_mask, ) hidden_states = self.dropout(hidden_states, training=training) hidden_states = residual + hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) # Cross-Attention Block cross_attn_present_key_value = None cross_attn_weights = None if encoder_hidden_states is not None: residual = hidden_states # cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn( hidden_states=hidden_states, key_value_states=encoder_hidden_states, attention_mask=encoder_attention_mask, layer_head_mask=cross_attn_layer_head_mask, past_key_value=cross_attn_past_key_value, ) hidden_states = self.dropout(hidden_states, training=training) hidden_states = residual + hidden_states hidden_states = self.encoder_attn_layer_norm(hidden_states) # add cross-attn to positions 3,4 of present_key_value tuple present_key_value = present_key_value + cross_attn_present_key_value # Fully Connected residual = hidden_states hidden_states = self.activation_fn(self.fc1(hidden_states)) hidden_states = self.activation_dropout(hidden_states, training=training) hidden_states = self.fc2(hidden_states) hidden_states = self.dropout(hidden_states, training=training) hidden_states = residual + hidden_states hidden_states 
= self.final_layer_norm(hidden_states) return ( hidden_states, self_attn_weights, cross_attn_weights, present_key_value, ) class TFBlenderbotSmallPreTrainedModel(TFPreTrainedModel): config_class = BlenderbotSmallConfig base_model_prefix = "model" @property def dummy_inputs(self): pad_token = 1 input_ids = tf.convert_to_tensor(DUMMY_INPUTS, dtype=tf.int32) decoder_input_ids = tf.convert_to_tensor(DUMMY_INPUTS, dtype=tf.int32) dummy_inputs = { "decoder_input_ids": decoder_input_ids, "attention_mask": tf.cast(input_ids != pad_token, tf.int32), "input_ids": input_ids, } return dummy_inputs @tf.function( input_signature=[ { "input_ids": tf.TensorSpec((None, None), tf.int32, name="input_ids"), "attention_mask": tf.TensorSpec((None, None), tf.int32, name="attention_mask"), "decoder_input_ids": tf.TensorSpec((None, None), tf.int32, name="decoder_input_ids"), "decoder_attention_mask": tf.TensorSpec((None, None), tf.int32, name="decoder_attention_mask"), } ] ) # Copied from transformers.models.bart.modeling_tf_bart.TFBartPretrainedModel.serving def serving(self, inputs): output = self.call(inputs) return self.serving_output(output) BLENDERBOT_SMALL_START_DOCSTRING = r""" This model inherits from [`TFPreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.) This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. Use it as a regular TF 2.0 Keras Model and refer to the TF 2.0 documentation for all matter related to general usage and behavior. <Tip> TensorFlow models and layers in `transformers` accept two formats as input: - having all inputs as keyword arguments (like PyTorch models), or - having all inputs as a list, tuple or dict in the first positional argument. 
The reason the second format is supported is that Keras methods prefer this format when passing inputs to models and layers. Because of this support, when using methods like `model.fit()` things should "just work" for you - just pass your inputs and labels in any format that `model.fit()` supports! If, however, you want to use the second format outside of Keras methods like `fit()` and `predict()`, such as when creating your own layers or models with the Keras `Functional` API, there are three possibilities you can use to gather all the input Tensors in the first positional argument: - a single Tensor with `input_ids` only and nothing else: `model(input_ids)` - a list of varying length with one or several input Tensors IN THE ORDER given in the docstring: `model([input_ids, attention_mask])` or `model([input_ids, attention_mask, token_type_ids])` - a dictionary with one or several input Tensors associated to the input names given in the docstring: `model({"input_ids": input_ids, "token_type_ids": token_type_ids})` Note that when creating models and layers with [subclassing](https://keras.io/guides/making_new_layers_and_models_via_subclassing/) then you don't need to worry about any of this, as you can just pass inputs like you would to any other Python function! </Tip> Args: config ([`BlenderbotSmallConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~TFPreTrainedModel.from_pretrained`] method to load the model weights. """ BLENDERBOT_SMALL_GENERATION_EXAMPLE = r""" Conversation example:: ```py >>> from transformers import AutoTokenizer, TFBlenderbotSmallForConditionalGeneration >>> mname = "facebook/blenderbot_small-90M" >>> model = BlenderbotSmallForConditionalGeneration.from_pretrained(mname) >>> tokenizer = AutoTokenizer.from_pretrained(mname) >>> UTTERANCE = "My friends are cool but they eat too many carbs." 
>>> print("Human: ", UTTERANCE) >>> inputs = tokenizer([UTTERANCE], return_tensors="tf") >>> reply_ids = model.generate(**inputs) >>> print("Bot: ", tokenizer.batch_decode(reply_ids, skip_special_tokens=True)[0]) what kind of carbs do they eat? i don't know much about carbs. >>> REPLY = "I'm not sure" >>> print("Human: ", REPLY) >>> NEXT_UTTERANCE = ( ... "My friends are cool but they eat too many carbs.</s> " ... "<s>what kind of carbs do they eat? i don't know much about carbs.</s> " ... "<s>I'm not sure." ... ) >>> inputs = tokenizer([NEXT_UTTERANCE], return_tensors="tf") >>> inputs.pop("token_type_ids") >>> next_reply_ids = model.generate(**inputs) >>> print("Bot: ", tokenizer.batch_decode(next_reply_ids, skip_special_tokens=True)[0]) ``` """ BLENDERBOT_SMALL_INPUTS_DOCSTRING = r""" Args: input_ids (`tf.Tensor` of shape `({0})`): Indices of input sequence tokens in the vocabulary. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`tf.Tensor` of shape `({0})`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) decoder_input_ids (`tf.Tensor` of shape `(batch_size, target_sequence_length)`, *optional*): Indices of decoder input sequence tokens in the vocabulary. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are decoder input IDs?](../glossary#decoder-input-ids) BlenderbotSmall uses the `bos_token_id` as the starting token for `decoder_input_ids` generation. If `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see `past_key_values`). 
decoder_attention_mask (`tf.Tensor` of shape `(batch_size, target_sequence_length)`, *optional*): will be made by default and ignore pad tokens. It is not recommended to set this for most use cases. decoder_position_ids (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each decoder input sequence tokens in the position embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`. head_mask (`tf.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. decoder_head_mask (`tf.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. cross_attn_head_mask (`tf.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. encoder_outputs (`tf.FloatTensor`, *optional*): hidden states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. of shape `(batch_size, sequence_length, hidden_size)` is a sequence of past_key_values (`Tuple[Tuple[tf.Tensor]]` of length `config.n_layers`) contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. 
use_cache (`bool`, *optional*, defaults to `True`): If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see `past_key_values`). Set to `False` during training, `True` during generation output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the config will be used instead. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the config will be used instead. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. This argument can be used in eager mode, in graph mode the value will always be set to True. training (`bool`, *optional*, defaults to `False`): Whether or not to use the model in training mode (some modules like dropout modules have different behaviors between training and evaluation). """ @keras_serializable class TFBlenderbotSmallEncoder(tf.keras.layers.Layer): config_class = BlenderbotSmallConfig """ Transformer encoder consisting of *config.encoder_layers* self attention layers. Each layer is a [`TFBlenderbotSmallEncoderLayer`]. 
Args: config: BlenderbotSmallConfig """ def __init__( self, config: BlenderbotSmallConfig, embed_tokens: Optional[tf.keras.layers.Embedding] = None, **kwargs ): super().__init__(**kwargs) self.config = config self.dropout = tf.keras.layers.Dropout(config.dropout) self.layerdrop = config.encoder_layerdrop self.padding_idx = config.pad_token_id self.max_source_positions = config.max_position_embeddings self.embed_scale = tf.math.sqrt(float(config.d_model)) if config.scale_embedding else 1.0 self.embed_tokens = embed_tokens self.embed_positions = TFBlenderbotSmallLearnedPositionalEmbedding( config.max_position_embeddings, config.d_model, name="embed_positions", ) self.layers = [TFBlenderbotSmallEncoderLayer(config, name=f"layers.{i}") for i in range(config.encoder_layers)] self.layernorm_embedding = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="layernorm_embedding") def get_embed_tokens(self): return self.embed_tokens def set_embed_tokens(self, embed_tokens): self.embed_tokens = embed_tokens @unpack_inputs def call( self, input_ids=None, inputs_embeds=None, attention_mask=None, head_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, training=False, ): """ Args: input_ids (`tf.Tensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. 
[What are attention masks?](../glossary#attention-mask) head_mask (`tf.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, `optional): Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. inputs_embeds (`tf.Tensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the config will be used instead. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the config will be used instead. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. This argument can be used in eager mode, in graph mode the value will always be set to True. training (`bool`, *optional*, defaults to `False`): Whether or not to use the model in training mode (some modules like dropout modules have different behaviors between training and evaluation). 
""" if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") elif input_ids is not None: input_shape = shape_list(input_ids) elif inputs_embeds is not None: input_shape = shape_list(inputs_embeds)[:-1] else: raise ValueError("You have to specify either input_ids or inputs_embeds") if inputs_embeds is None: # if `self.embed_tokens.load_weight_prefix` is set, runs the embedding operation with the correct name # scope, so that its weights are registered with the desired name for loading/storing. When `tf.name_scope` # is used with a name ending in `/`, that name replaces the current name scope. # (embeddings with tf.name_scope: self.embed_tokens.load_weight_prefix/self.embed_tokens.name/embeddings:0) context = [] if hasattr(self.embed_tokens, "load_weight_prefix"): context.append(tf.name_scope(self.embed_tokens.load_weight_prefix + "/")) with ContextManagers(context): # Note: tf.gather, on which the embedding layer is based, won't check positive out of bound # indices on GPU, returning zeros instead. This is a dangerous silent behavior. 
                tf.debugging.assert_less(
                    input_ids,
                    tf.cast(self.embed_tokens.input_dim, dtype=input_ids.dtype),
                    message=(
                        "input_ids must be smaller than the embedding layer's input dimension (got"
                        f" {tf.math.reduce_max(input_ids)} >= {self.embed_tokens.input_dim})"
                    ),
                )
                inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale

        embed_pos = self.embed_positions(input_shape)
        hidden_states = inputs_embeds + embed_pos
        hidden_states = self.layernorm_embedding(hidden_states)
        hidden_states = self.dropout(hidden_states, training=training)

        # check attention mask and invert
        if attention_mask is not None:
            # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
            attention_mask = _expand_mask(attention_mask)
        else:
            attention_mask = None

        encoder_states = () if output_hidden_states else None
        all_attentions = () if output_attentions else None

        # check if head_mask has a correct number of layers specified if desired
        if head_mask is not None:
            tf.debugging.assert_equal(
                shape_list(head_mask)[0],
                len(self.layers),
                message=(
                    f"The head_mask should be specified for {len(self.layers)} layers, but it is for"
                    f" {shape_list(head_mask)[0]}."
                ),
            )

        # encoder layers
        for idx, encoder_layer in enumerate(self.layers):
            if output_hidden_states:
                encoder_states = encoder_states + (hidden_states,)
            # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
            dropout_probability = random.uniform(0, 1)
            if training and (dropout_probability < self.layerdrop):
                # skip the layer
                continue

            hidden_states, attn = encoder_layer(
                hidden_states,
                attention_mask,
                head_mask[idx] if head_mask is not None else None,
            )

            if output_attentions:
                all_attentions += (attn,)

        if output_hidden_states:
            encoder_states = encoder_states + (hidden_states,)

        if not return_dict:
            return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)
        return TFBaseModelOutput(
            last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions
        )


@keras_serializable
class TFBlenderbotSmallDecoder(tf.keras.layers.Layer):
    config_class = BlenderbotSmallConfig
    """
    Transformer decoder consisting of *config.decoder_layers* layers. Each layer is a [`TFBlenderbotSmallDecoderLayer`]

    Args:
        config: BlenderbotSmallConfig
        embed_tokens: output embedding
    """

    def __init__(
        self, config: BlenderbotSmallConfig, embed_tokens: Optional[tf.keras.layers.Embedding] = None, **kwargs
    ):
        super().__init__(**kwargs)
        self.config = config
        self.padding_idx = config.pad_token_id
        self.embed_tokens = embed_tokens
        self.layerdrop = config.decoder_layerdrop
        self.embed_positions = TFBlenderbotSmallLearnedPositionalEmbedding(
            config.max_position_embeddings,
            config.d_model,
            name="embed_positions",
        )
        self.embed_scale = tf.math.sqrt(float(config.d_model)) if config.scale_embedding else 1.0
        self.layers = [TFBlenderbotSmallDecoderLayer(config, name=f"layers.{i}") for i in range(config.decoder_layers)]
        self.layernorm_embedding = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="layernorm_embedding")

        self.dropout = tf.keras.layers.Dropout(config.dropout)

    def get_embed_tokens(self):
        return self.embed_tokens

    def set_embed_tokens(self, embed_tokens):
        self.embed_tokens = embed_tokens

    @unpack_inputs
    def call(
        self,
        input_ids=None,
        inputs_embeds=None,
        attention_mask=None,
        position_ids=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        head_mask=None,
        cross_attn_head_mask=None,
        past_key_values=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
    ):
        r"""
        Args:
            input_ids (`tf.Tensor` of shape `(batch_size, sequence_length)`):
                Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
                provide it.

                Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
                [`PreTrainedTokenizer.__call__`] for details.

                [What are input IDs?](../glossary#input-ids)
            attention_mask (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
                Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

                - 1 for tokens that are **not masked**,
                - 0 for tokens that are **masked**.

                [What are attention masks?](../glossary#attention-mask)
            position_ids (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
                Indices of positions of each decoder input sequence tokens in the position embeddings. Selected in the
                range `[0, config.max_position_embeddings - 1]`.
            encoder_hidden_states (`tf.Tensor` of shape `(batch_size, encoder_sequence_length, hidden_size)`, *optional*):
                Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
                of the decoder.
            encoder_attention_mask (`tf.Tensor` of shape `(batch_size, encoder_sequence_length)`, *optional*):
                Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values
                selected in `[0, 1]`:

                - 1 for tokens that are **not masked**,
                - 0 for tokens that are **masked**.

                [What are attention masks?](../glossary#attention-mask)
            head_mask (`tf.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*):
                Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`:

                - 1 indicates the head is **not masked**,
                - 0 indicates the head is **masked**.

            cross_attn_head_mask (`tf.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*):
                Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`:

                - 1 indicates the head is **not masked**,
                - 0 indicates the head is **masked**.

            past_key_values (`Tuple[Tuple[tf.Tensor]]` of length `config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
                Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up
                decoding.

                If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those
                that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of
                all `decoder_input_ids` of shape `(batch_size, sequence_length)`.
            inputs_embeds (`tf.Tensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
                Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation.
                This is useful if you want more control over how to convert `input_ids` indices into associated vectors
                than the model's internal embedding lookup matrix.
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail. This argument can be used only in eager mode, in graph mode the value
                in the config will be used instead.
            output_hidden_states (`bool`, *optional*):
                Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors
                for more detail. This argument can be used only in eager mode, in graph mode the value in the config
                will be used instead.
            return_dict (`bool`, *optional*):
                Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. This argument can be used
                in eager mode, in graph mode the value will always be set to True.
            training (`bool`, *optional*, defaults to `False`):
                Whether or not to use the model in training mode (some modules like dropout modules have different
                behaviors between training and evaluation).
        """
        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = shape_list(input_ids)
        elif inputs_embeds is not None:
            input_shape = shape_list(inputs_embeds)[:-1]
        else:
            raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds")

        past_key_values_length = shape_list(past_key_values[0][0])[2] if past_key_values is not None else 0

        if inputs_embeds is None:
            # if `self.embed_tokens.load_weight_prefix` is set, runs the embedding operation with the correct name
            # scope, so that its weights are registered with the desired name for loading/storing. When `tf.name_scope`
            # is used with a name ending in `/`, that name replaces the current name scope.
            # (embeddings with tf.name_scope: self.embed_tokens.load_weight_prefix/self.embed_tokens.name/embeddings:0)
            context = []
            if hasattr(self.embed_tokens, "load_weight_prefix"):
                context.append(tf.name_scope(self.embed_tokens.load_weight_prefix + "/"))
            with ContextManagers(context):
                # Note: tf.gather, on which the embedding layer is based, won't check positive out of bound
                # indices on GPU, returning zeros instead. This is a dangerous silent behavior.
                tf.debugging.assert_less(
                    input_ids,
                    tf.cast(self.embed_tokens.input_dim, dtype=input_ids.dtype),
                    message=(
                        "input_ids must be smaller than the embedding layer's input dimension (got"
                        f" {tf.math.reduce_max(input_ids)} >= {self.embed_tokens.input_dim})"
                    ),
                )
                inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale

        # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
        if input_shape[-1] > 1:
            combined_attention_mask = _make_causal_mask(input_shape, past_key_values_length=past_key_values_length)
        else:
            combined_attention_mask = _expand_mask(
                tf.ones((input_shape[0], input_shape[1] + past_key_values_length)), tgt_len=input_shape[-1]
            )

        if attention_mask is not None:
            combined_attention_mask = combined_attention_mask + _expand_mask(attention_mask, tgt_len=input_shape[-1])

        if encoder_hidden_states is not None and encoder_attention_mask is not None:
            # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
            encoder_attention_mask = _expand_mask(encoder_attention_mask, tgt_len=input_shape[-1])

        # embed positions
        if position_ids is None:
            positions = self.embed_positions(input_shape, past_key_values_length)
        else:
            positions = self.embed_positions(input_shape, position_ids=position_ids)

        hidden_states = self.layernorm_embedding(inputs_embeds) + positions
        hidden_states = self.dropout(hidden_states, training=training)

        # decoder layers
        all_hidden_states = () if output_hidden_states else None
        all_self_attns = () if output_attentions else None
        all_cross_attns = () if (output_attentions and encoder_hidden_states is not None) else None
        present_key_values = () if use_cache else None

        # check if head_mask and cross_attn_head_mask have a correct number of layers specified if desired
        for attn_mask_name, attn_mask in [("head_mask", head_mask), ("cross_attn_head_mask", cross_attn_head_mask)]:
            if attn_mask is not None:
                tf.debugging.assert_equal(
                    shape_list(attn_mask)[0],
                    len(self.layers),
                    message=(
                        f"The {attn_mask_name} should be specified for {len(self.layers)} layers, but it is for"
                        f" {shape_list(attn_mask)[0]}."
                    ),
                )

        for idx, decoder_layer in enumerate(self.layers):
            # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
            if output_hidden_states:
                all_hidden_states += (hidden_states,)
            dropout_probability = random.uniform(0, 1)

            if training and (dropout_probability < self.layerdrop):
                continue

            past_key_value = past_key_values[idx] if past_key_values is not None else None

            hidden_states, layer_self_attn, layer_cross_attn, present_key_value = decoder_layer(
                hidden_states,
                attention_mask=combined_attention_mask,
                encoder_hidden_states=encoder_hidden_states,
                encoder_attention_mask=encoder_attention_mask,
                layer_head_mask=head_mask[idx] if head_mask is not None else None,
                cross_attn_layer_head_mask=cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None,
                past_key_value=past_key_value,
            )

            if use_cache:
                present_key_values += (present_key_value,)

            if output_attentions:
                all_self_attns += (layer_self_attn,)

                if encoder_hidden_states is not None:
                    all_cross_attns += (layer_cross_attn,)

        if output_hidden_states:
            all_hidden_states += (hidden_states,)

        if not return_dict:
            return hidden_states, present_key_values, all_hidden_states, all_self_attns, all_cross_attns
        else:
            return TFBaseModelOutputWithPastAndCrossAttentions(
                last_hidden_state=hidden_states,
                past_key_values=present_key_values,
                hidden_states=all_hidden_states,
                attentions=all_self_attns,
                cross_attentions=all_cross_attns,
            )


@keras_serializable
class TFBlenderbotSmallMainLayer(tf.keras.layers.Layer):
    # Seq2seq core shared by all task heads: one shared token embedding feeding encoder and decoder.
    config_class = BlenderbotSmallConfig

    def __init__(self, config: BlenderbotSmallConfig, **kwargs):
        super().__init__(**kwargs)

        self.config = config
        self.shared = tf.keras.layers.Embedding(
            input_dim=config.vocab_size,
            output_dim=config.d_model,
            embeddings_initializer=tf.keras.initializers.TruncatedNormal(stddev=self.config.init_std),
            name="model.shared",
        )
        # Additional attribute to specify the expected name scope of the layer (for loading/storing weights)
        self.shared.load_weight_prefix = "model.shared"

        self.encoder = TFBlenderbotSmallEncoder(config, self.shared, name="encoder")
        self.decoder = TFBlenderbotSmallDecoder(config, self.shared, name="decoder")

    def get_input_embeddings(self):
        return self.shared

    def set_input_embeddings(self, new_embeddings):
        # Keep encoder/decoder embedding references in sync with the shared table.
        self.shared = new_embeddings
        self.encoder.embed_tokens = self.shared
        self.decoder.embed_tokens = self.shared

    @unpack_inputs
    def call(
        self,
        input_ids=None,
        attention_mask=None,
        decoder_input_ids=None,
        decoder_attention_mask=None,
        decoder_position_ids=None,
        head_mask=None,
        decoder_head_mask=None,
        cross_attn_head_mask=None,
        encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None,
        past_key_values=None,
        inputs_embeds=None,
        decoder_inputs_embeds=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
        **kwargs,
    ):
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )

        if encoder_outputs is None:
            encoder_outputs = self.encoder(
                input_ids=input_ids,
                attention_mask=attention_mask,
                head_mask=head_mask,
                inputs_embeds=inputs_embeds,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                return_dict=return_dict,
                training=training,
            )
        # If the user passed a tuple for encoder_outputs, we wrap it in a TFBaseModelOutput when return_dict=True
        elif return_dict and not isinstance(encoder_outputs, TFBaseModelOutput):
            encoder_outputs = TFBaseModelOutput(
                last_hidden_state=encoder_outputs[0],
                hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,
                attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,
            )
        # If the user passed a TFBaseModelOutput for encoder_outputs, we wrap it in a tuple when return_dict=False
        elif not return_dict and not isinstance(encoder_outputs, tuple):
            encoder_outputs = encoder_outputs.to_tuple()

        decoder_outputs = self.decoder(
            decoder_input_ids,
            attention_mask=decoder_attention_mask,
            position_ids=decoder_position_ids,
            encoder_hidden_states=encoder_outputs[0],
            encoder_attention_mask=attention_mask,
            head_mask=decoder_head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            past_key_values=past_key_values,
            inputs_embeds=decoder_inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
        )

        if not return_dict:
            return decoder_outputs + encoder_outputs

        return TFSeq2SeqModelOutput(
            last_hidden_state=decoder_outputs.last_hidden_state,
            past_key_values=decoder_outputs.past_key_values,
            decoder_hidden_states=decoder_outputs.hidden_states,
            decoder_attentions=decoder_outputs.attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=encoder_outputs.last_hidden_state,
            encoder_hidden_states=encoder_outputs.hidden_states,
            encoder_attentions=encoder_outputs.attentions,
        )


@add_start_docstrings(
    "The bare BLENDERBOT_SMALL Model outputting raw hidden-states without any specific head on top.",
    BLENDERBOT_SMALL_START_DOCSTRING,
)
class TFBlenderbotSmallModel(TFBlenderbotSmallPreTrainedModel):
    def __init__(self, config: BlenderbotSmallConfig, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)

        self.model = TFBlenderbotSmallMainLayer(config, name="model")

    def get_encoder(self):
        return self.model.encoder

    def get_decoder(self):
        return self.model.decoder

    @unpack_inputs
    @add_start_docstrings_to_model_forward(BLENDERBOT_SMALL_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=TFSeq2SeqModelOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def call(
        self,
        input_ids: Optional[tf.Tensor] = None,
        attention_mask: Optional[tf.Tensor] = None,
        decoder_input_ids: Optional[tf.Tensor] = None,
        decoder_attention_mask: Optional[tf.Tensor] = None,
        decoder_position_ids: Optional[tf.Tensor] = None,
        head_mask: Optional[tf.Tensor] = None,
        decoder_head_mask: Optional[tf.Tensor] = None,
        cross_attn_head_mask: Optional[tf.Tensor] = None,
        encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None,
        past_key_values: Optional[List[tf.Tensor]] = None,
        inputs_embeds: Optional[tf.Tensor] = None,
        decoder_inputs_embeds: Optional[tf.Tensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        training: Optional[bool] = False,
        **kwargs,
    ) -> Union[Tuple[tf.Tensor], TFSeq2SeqModelOutput]:
        # Thin wrapper: all real work happens in the shared main layer.
        outputs = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            decoder_input_ids=decoder_input_ids,
            decoder_attention_mask=decoder_attention_mask,
            decoder_position_ids=decoder_position_ids,
            head_mask=head_mask,
            decoder_head_mask=decoder_head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            encoder_outputs=encoder_outputs,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            decoder_inputs_embeds=decoder_inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
        )

        return outputs

    # Copied from transformers.models.bart.modeling_tf_bart.TFBartModel.serving_output
    def serving_output(self, output):
        pkv = tf.tuple(output.past_key_values)[1] if self.config.use_cache else None
        dec_hs = tf.convert_to_tensor(output.decoder_hidden_states) if self.config.output_hidden_states else None
        dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.output_attentions else None
        cross_attns = tf.convert_to_tensor(output.cross_attentions) if self.config.output_attentions else None
        enc_hs = tf.convert_to_tensor(output.encoder_hidden_states) if self.config.output_hidden_states else None
        enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.output_attentions else None

        return TFSeq2SeqModelOutput(
            last_hidden_state=output.last_hidden_state,
            past_key_values=pkv,
            decoder_hidden_states=dec_hs,
            decoder_attentions=dec_attns,
            cross_attentions=cross_attns,
encoder_last_hidden_state=output.encoder_last_hidden_state, encoder_hidden_states=enc_hs, encoder_attentions=enc_attns, ) # Copied from transformers.models.bart.modeling_tf_bart.BiasLayer class BiasLayer(tf.keras.layers.Layer): """ Bias as a layer. It is used for serialization purposes: `tf.keras.Model.save_weights` stores on a per-layer basis, so all weights have to be registered in a layer. """ def __init__(self, shape, initializer, trainable, name, **kwargs): super().__init__(name=name, **kwargs) # Note: the name of this variable will NOT be scoped when serialized, i.e. it will not be in the format of # "outer_layer/inner_layer/.../name:0". Instead, it will be "name:0". For further details, see: # https://github.com/huggingface/transformers/pull/18833#issuecomment-1233090214 self.bias = self.add_weight(name=name, shape=shape, initializer=initializer, trainable=trainable) def call(self, x): return x + self.bias @add_start_docstrings( "The BLENDERBOT_SMALL Model with a language modeling head. Can be used for summarization.", BLENDERBOT_SMALL_START_DOCSTRING, ) class TFBlenderbotSmallForConditionalGeneration(TFBlenderbotSmallPreTrainedModel, TFCausalLanguageModelingLoss): _keys_to_ignore_on_load_unexpected = [ r"model.encoder.embed_tokens.weight", r"model.decoder.embed_tokens.weight", ] def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.model = TFBlenderbotSmallMainLayer(config, name="model") self.use_cache = config.use_cache # final_bias_logits is registered as a buffer in pytorch, so not trainable for the sake of consistency. 
self.bias_layer = BiasLayer( name="final_logits_bias", shape=[1, config.vocab_size], initializer="zeros", trainable=False ) def get_decoder(self): return self.model.decoder def get_encoder(self): return self.model.encoder def get_output_embeddings(self): return self.get_input_embeddings() def set_output_embeddings(self, value): self.set_input_embeddings(value) def get_bias(self): return {"final_logits_bias": self.bias_layer.bias} def set_bias(self, value): # Replaces the existing layers containing bias for correct (de)serialization. vocab_size = value["final_logits_bias"].shape[-1] self.bias_layer = BiasLayer( name="final_logits_bias", shape=[1, vocab_size], initializer="zeros", trainable=False ) self.bias_layer.bias.assign(value["final_logits_bias"]) @unpack_inputs @add_start_docstrings_to_model_forward(BLENDERBOT_SMALL_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFSeq2SeqLMOutput, config_class=_CONFIG_FOR_DOC) @add_end_docstrings(BLENDERBOT_SMALL_GENERATION_EXAMPLE) def call( self, input_ids: Optional[tf.Tensor] = None, attention_mask: Optional[tf.Tensor] = None, decoder_input_ids: Optional[tf.Tensor] = None, decoder_attention_mask: Optional[tf.Tensor] = None, decoder_position_ids: Optional[tf.Tensor] = None, head_mask: Optional[tf.Tensor] = None, decoder_head_mask: Optional[tf.Tensor] = None, cross_attn_head_mask: Optional[tf.Tensor] = None, encoder_outputs: Optional[TFBaseModelOutput] = None, past_key_values: Optional[List[tf.Tensor]] = None, inputs_embeds: Optional[tf.Tensor] = None, decoder_inputs_embeds: Optional[tf.Tensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, labels: Optional[tf.Tensor] = None, training: Optional[bool] = False, ) -> Union[Tuple[tf.Tensor], TFSeq2SeqLMOutput]: r""" labels (`tf.tensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. 
Indices should either be in `[0, ..., config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. Returns: """ if labels is not None: labels = tf.where( labels == self.config.pad_token_id, tf.cast(tf.fill(shape_list(labels), -100), labels.dtype), labels, ) use_cache = False if decoder_input_ids is None and decoder_inputs_embeds is None: decoder_input_ids = shift_tokens_right( labels, self.config.pad_token_id, self.config.decoder_start_token_id ) outputs = self.model( input_ids, attention_mask=attention_mask, decoder_input_ids=decoder_input_ids, decoder_attention_mask=decoder_attention_mask, decoder_position_ids=decoder_position_ids, head_mask=head_mask, decoder_head_mask=decoder_head_mask, cross_attn_head_mask=cross_attn_head_mask, encoder_outputs=encoder_outputs, past_key_values=past_key_values, inputs_embeds=inputs_embeds, decoder_inputs_embeds=decoder_inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) lm_logits = tf.matmul(outputs[0], self.model.shared.weights, transpose_b=True) lm_logits = self.bias_layer(lm_logits) masked_lm_loss = None if labels is None else self.hf_compute_loss(labels, lm_logits) if not return_dict: output = (lm_logits,) + outputs[1:] return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output return TFSeq2SeqLMOutput( loss=masked_lm_loss, logits=lm_logits, past_key_values=outputs.past_key_values, # index 1 of d outputs decoder_hidden_states=outputs.decoder_hidden_states, # index 2 of d outputs decoder_attentions=outputs.decoder_attentions, # index 3 of d outputs cross_attentions=outputs.cross_attentions, # index 4 of d outputs encoder_last_hidden_state=outputs.encoder_last_hidden_state, # index 0 of encoder outputs encoder_hidden_states=outputs.encoder_hidden_states, # 1 of e 
out encoder_attentions=outputs.encoder_attentions, # 2 of e out ) # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.serving_output def serving_output(self, output): pkv = tf.tuple(output.past_key_values)[1] if self.config.use_cache else None dec_hs = tf.convert_to_tensor(output.decoder_hidden_states) if self.config.output_hidden_states else None dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.output_attentions else None cross_attns = tf.convert_to_tensor(output.cross_attentions) if self.config.output_attentions else None enc_hs = tf.convert_to_tensor(output.encoder_hidden_states) if self.config.output_hidden_states else None enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.output_attentions else None return TFSeq2SeqLMOutput( logits=output.logits, past_key_values=pkv, decoder_hidden_states=dec_hs, decoder_attentions=dec_attns, cross_attentions=cross_attns, encoder_last_hidden_state=output.encoder_last_hidden_state, encoder_hidden_states=enc_hs, encoder_attentions=enc_attns, ) # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.prepare_inputs_for_generation def prepare_inputs_for_generation( self, decoder_input_ids, past_key_values=None, attention_mask=None, decoder_attention_mask=None, head_mask=None, decoder_head_mask=None, cross_attn_head_mask=None, use_cache=None, encoder_outputs=None, **kwargs, ): # cut decoder_input_ids if past_key_values is used if past_key_values is not None: decoder_input_ids = decoder_input_ids[:, -1:] if decoder_attention_mask is not None: # xla decoder_position_ids = tf.math.cumsum(decoder_attention_mask, axis=-1, exclusive=True)[:, -1:] elif past_key_values is not None: # no xla + past_key_values decoder_position_ids = past_key_values[0][0].shape[2] else: # no xla + no past_key_values decoder_position_ids = tf.range(decoder_input_ids.shape[1]) return { "input_ids": None, # encoder_outputs is defined. 
input_ids not needed "encoder_outputs": encoder_outputs, "past_key_values": past_key_values, "decoder_input_ids": decoder_input_ids, "attention_mask": attention_mask, "decoder_attention_mask": decoder_attention_mask, "decoder_position_ids": decoder_position_ids, "head_mask": head_mask, "decoder_head_mask": decoder_head_mask, "cross_attn_head_mask": cross_attn_head_mask, "use_cache": use_cache, # change this to avoid caching (presumably for debugging) }
27182812/ChatGLM-LLaMA-chinese-insturct
4,058
src/transformers/models/blenderbot_small/tokenization_blenderbot_small_fast.py
# coding=utf-8
# Copyright 2021, The Facebook, Inc. and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fast tokenization class for BlenderbotSmall."""
from typing import List, Optional

from tokenizers import ByteLevelBPETokenizer

from ...tokenization_utils_fast import PreTrainedTokenizerFast
from ...utils import logging
from .tokenization_blenderbot_small import BlenderbotSmallTokenizer


logger = logging.get_logger(__name__)

VOCAB_FILES_NAMES = {
    "vocab_file": "vocab.json",
    "merges_file": "merges.txt",
    "tokenizer_config_file": "tokenizer_config.json",
}

PRETRAINED_VOCAB_FILES_MAP = {
    "vocab_file": {
        "facebook/blenderbot_small-90M": "https://huggingface.co/facebook/blenderbot_small-90M/resolve/main/vocab.json"
    },
    "merges_file": {
        "facebook/blenderbot_small-90M": "https://huggingface.co/facebook/blenderbot_small-90M/resolve/main/merges.txt"
    },
    "tokenizer_config_file": {
        "facebook/blenderbot_small-90M": (
            "https://huggingface.co/facebook/blenderbot_small-90M/resolve/main/tokenizer_config.json"
        )
    },
}

PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
    "facebook/blenderbot_small-90M": 512,
}


class BlenderbotSmallTokenizerFast(PreTrainedTokenizerFast):
    """
    Construct a "fast" BlenderbotSmall tokenizer (backed by HuggingFace's *tokenizers* library),
    using a byte-level BPE backend.

    Args:
        vocab_file (`str`):
            Path to the vocabulary file.
    """

    vocab_files_names = VOCAB_FILES_NAMES
    pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
    max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
    slow_tokenizer_class = BlenderbotSmallTokenizer

    def __init__(
        self,
        vocab_file=None,
        merges_file=None,
        unk_token="<|endoftext|>",
        bos_token="<|endoftext|>",
        eos_token="<|endoftext|>",
        add_prefix_space=False,
        trim_offsets=True,
        **kwargs,
    ):
        # Build the byte-level BPE backend first, then hand it to the fast-tokenizer base class.
        backend_tokenizer = ByteLevelBPETokenizer(
            vocab=vocab_file,
            merges=merges_file,
            add_prefix_space=add_prefix_space,
            trim_offsets=trim_offsets,
        )
        super().__init__(
            backend_tokenizer,
            bos_token=bos_token,
            eos_token=eos_token,
            unk_token=unk_token,
            **kwargs,
        )
        self.add_prefix_space = add_prefix_space

    def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
        """
        Wrap one or two sequences with BOS/EOS special tokens:
        single sequence -> `<bos> A <eos>`; pair -> `<bos> A <eos> <eos> B <eos>`.
        """
        bos = [self.bos_token_id]
        eos = [self.eos_token_id]
        if token_ids_1 is None:
            return bos + token_ids_0 + eos
        return bos + token_ids_0 + eos + eos + token_ids_1 + eos

    def create_token_type_ids_from_sequences(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Create a mask from the two sequences passed to be used in a sequence-pair classification task.
        BlenderbotSmall does not make use of token type ids, therefore a list of zeros is returned.

        Args:
            token_ids_0 (`List[int]`):
                List of IDs.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.

        Returns:
            `List[int]`: List of zeros.
        """
        # Mask length mirrors cls + token_ids_0 + sep (+ sep + token_ids_1 + sep):
        # one extra slot at each boundary, i.e. +2 for a single sequence, +4 for a pair.
        if token_ids_1 is None:
            return [0] * (len(token_ids_0) + 2)
        return [0] * (len(token_ids_0) + len(token_ids_1) + 4)
27182812/ChatGLM-LLaMA-chinese-insturct
8,641
src/transformers/models/blenderbot_small/tokenization_blenderbot_small.py
# coding=utf-8
# Copyright 2021 The Facebook Inc. and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenization class for BlenderbotSmall."""

import json
import os
from typing import Dict, List, Optional, Tuple

import regex as re

from ...tokenization_utils import PreTrainedTokenizer
from ...utils import logging


logger = logging.get_logger(__name__)


VOCAB_FILES_NAMES = {
    "vocab_file": "vocab.json",
    "merges_file": "merges.txt",
    "tokenizer_config_file": "tokenizer_config.json",
}

PRETRAINED_VOCAB_FILES_MAP = {
    "vocab_file": {
        "facebook/blenderbot_small-90M": "https://huggingface.co/facebook/blenderbot_small-90M/resolve/main/vocab.json"
    },
    "merges_file": {
        "facebook/blenderbot_small-90M": "https://huggingface.co/facebook/blenderbot_small-90M/resolve/main/merges.txt"
    },
    "tokenizer_config_file": {
        "facebook/blenderbot_small-90M": (
            "https://huggingface.co/facebook/blenderbot_small-90M/resolve/main/tokenizer_config.json"
        )
    },
}

PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {"facebook/blenderbot_small-90M": 512}


def get_pairs(word):
    """
    Return set of symbol pairs in a word.

    Word is represented as tuple of symbols (symbols being variable-length strings).
    Raises `IndexError` on an empty word; callers in `bpe` only pass non-empty tuples.
    """
    pairs = set()
    prev_char = word[0]
    for char in word[1:]:
        pairs.add((prev_char, char))
        prev_char = char
    # NOTE: a redundant `pairs = set(pairs)` re-copy was removed here; `pairs` is already a set.
    return pairs


class BlenderbotSmallTokenizer(PreTrainedTokenizer):
    """
    Constructs a Blenderbot-90M tokenizer based on BPE (Byte-Pair-Encoding)

    This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to
    the superclass for more information regarding methods.

    Args:
        vocab_file (`str`):
            File containing the vocabulary.
        merges_file (`str`):
            Path to the merges file.
        bos_token (`str`, *optional*, defaults to `"__start__"`):
            The beginning of sentence token.
        eos_token (`str`, *optional*, defaults to `"__end__"`):
            The end of sentence token.
        unk_token (`str`, *optional*, defaults to `"__unk__"`):
            The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
            token instead.
        pad_token (`str`, *optional*, defaults to `"__pad__"`):
            The token used for padding, for example when batching sequences of different lengths.
        **kwargs
            Additional keyword arguments passed along to [`PreTrainedTokenizer`]
    """

    vocab_files_names = VOCAB_FILES_NAMES
    pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
    max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
    model_input_names = ["input_ids", "attention_mask"]

    def __init__(
        self,
        vocab_file,
        merges_file,
        bos_token="__start__",
        eos_token="__end__",
        unk_token="__unk__",
        pad_token="__null__",
        **kwargs,
    ):
        super().__init__(unk_token=unk_token, bos_token=bos_token, eos_token=eos_token, pad_token=pad_token, **kwargs)

        with open(vocab_file, encoding="utf-8") as vocab_handle:
            self.encoder = json.load(vocab_handle)
        self.decoder = {v: k for k, v in self.encoder.items()}
        with open(merges_file, encoding="utf-8") as merges_handle:
            # First line of a merges file is a "#version: ..." header; last split element is
            # the empty string after the trailing newline — both are dropped.
            merges = merges_handle.read().split("\n")[1:-1]
        merges = [tuple(merge.split()) for merge in merges]
        # Lower rank = earlier (higher-priority) merge.
        self.bpe_ranks = dict(zip(merges, range(len(merges))))
        self.cache = {}

    @property
    def vocab_size(self) -> int:
        return len(self.encoder)

    def get_vocab(self) -> Dict:
        return dict(self.encoder, **self.added_tokens_encoder)

    def bpe(self, token: str) -> str:
        """
        Apply BlenderbotSmall pre-tokenization and BPE to one whitespace-delimited token,
        returning the sub-words joined by spaces with "@@" continuation markers.
        """
        if token in self.cache:
            return self.cache[token]
        # Split punctuation / apostrophes off, collapse runs of whitespace, encode newlines.
        token = re.sub("([.,!?()])", r" \1", token)
        token = re.sub("(')", r" \1 ", token)
        token = re.sub(r"\s{2,}", " ", token)
        if "\n" in token:
            token = token.replace("\n", " __newln__")

        tokens = token.split(" ")
        words = []
        # NOTE(review): the loop variable below shadows the `token` parameter, so the cache is
        # written with lowercased sub-tokens while the lookup above uses the raw token. This is
        # benign in practice (each cached value equals `bpe(sub_token)`), but worth confirming
        # before relying on cache hit rates.
        for token in tokens:
            if not len(token):
                continue

            token = token.lower()
            word = tuple(token)
            # Mark the last symbol as word-final for the merge table.
            word = tuple(list(word[:-1]) + [word[-1] + "</w>"])
            pairs = get_pairs(word)

            if not pairs:
                words.append(token)
                continue

            # Greedily apply the best-ranked merge until none applies or one symbol remains.
            while True:
                bigram = min(pairs, key=lambda pair: self.bpe_ranks.get(pair, float("inf")))

                if bigram not in self.bpe_ranks:
                    break

                first, second = bigram
                new_word = []
                i = 0

                while i < len(word):
                    try:
                        j = word.index(first, i)
                        new_word.extend(word[i:j])
                        i = j
                    except ValueError:
                        new_word.extend(word[i:])
                        break

                    if word[i] == first and i < len(word) - 1 and word[i + 1] == second:
                        new_word.append(first + second)
                        i += 2
                    else:
                        new_word.append(word[i])
                        i += 1

                new_word = tuple(new_word)
                word = new_word
                if len(word) == 1:
                    break
                else:
                    pairs = get_pairs(word)
            word = "@@ ".join(word)
            # Strip the trailing "</w>" word-boundary marker (4 characters).
            word = word[:-4]

            self.cache[token] = word
            words.append(word)
        return " ".join(words)

    def _tokenize(self, text: str) -> List[str]:
        """Split a string into tokens using BPE."""
        split_tokens = []

        words = re.findall(r"\S+\n?", text)

        for token in words:
            split_tokens.extend(list(self.bpe(token).split(" ")))
        return split_tokens

    def _convert_token_to_id(self, token: str) -> int:
        """Converts a token to an id using the vocab."""
        token = token.lower()
        return self.encoder.get(token, self.encoder.get(self.unk_token))

    def _convert_id_to_token(self, index: int) -> str:
        """Converts an index (integer) in a token (str) using the vocab."""
        return self.decoder.get(index, self.unk_token)

    def convert_tokens_to_string(self, tokens: List[str]) -> str:
        """Converts a sequence of tokens in a single string."""
        out_string = " ".join(tokens).replace("@@ ", "").strip()
        return out_string

    def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
        """
        Write vocab.json and merges.txt into `save_directory`; returns their paths.
        Returns `None` (after logging an error) when `save_directory` is not a directory.
        """
        if not os.path.isdir(save_directory):
            logger.error(f"Vocabulary path ({save_directory}) should be a directory")
            return
        vocab_file = os.path.join(
            save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"]
        )
        merge_file = os.path.join(
            save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["merges_file"]
        )

        with open(vocab_file, "w", encoding="utf-8") as f:
            f.write(json.dumps(self.encoder, indent=2, sort_keys=True, ensure_ascii=False) + "\n")

        index = 0
        with open(merge_file, "w", encoding="utf-8") as writer:
            writer.write("#version: 0.2\n")
            for bpe_tokens, token_index in sorted(self.bpe_ranks.items(), key=lambda kv: kv[1]):
                if index != token_index:
                    logger.warning(
                        f"Saving vocabulary to {merge_file}: BPE merge indices are not consecutive."
                        " Please check that the tokenizer is not corrupted!"
                    )
                    index = token_index
                writer.write(" ".join(bpe_tokens) + "\n")
                index += 1

        return vocab_file, merge_file
27182812/ChatGLM-LLaMA-chinese-insturct
1,816
src/transformers/models/cpm/__init__.py
# Copyright 2020 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_sentencepiece_available, is_tokenizers_available _import_structure = {} try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["tokenization_cpm"] = ["CpmTokenizer"] try: if not is_tokenizers_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["tokenization_cpm_fast"] = ["CpmTokenizerFast"] if TYPE_CHECKING: try: if not is_sentencepiece_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_cpm import CpmTokenizer try: if not is_tokenizers_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_cpm_fast import CpmTokenizerFast else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
15,154
src/transformers/models/cpm/tokenization_cpm.py
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenization classes."""
import os
import unicodedata
from shutil import copyfile
from typing import Any, Dict, List, Optional, Tuple

import sentencepiece as spm

from ...tokenization_utils import AddedToken, PreTrainedTokenizer
from ...utils import SPIECE_UNDERLINE, logging


logger = logging.get_logger(__name__)

VOCAB_FILES_NAMES = {"vocab_file": "spiece.model"}

PRETRAINED_VOCAB_FILES_MAP = {
    "vocab_file": {
        "TsinghuaAI/CPM-Generate": "https://huggingface.co/TsinghuaAI/CPM-Generate/resolve/main/spiece.model",
    }
}


class CpmTokenizer(PreTrainedTokenizer):
    """Runs pre-tokenization with Jieba segmentation tool. It is used in CPM models."""

    def __init__(
        self,
        vocab_file,
        do_lower_case=False,
        remove_space=True,
        keep_accents=False,
        bos_token="<s>",
        eos_token="</s>",
        unk_token="<unk>",
        sep_token="<sep>",
        pad_token="<pad>",
        cls_token="<cls>",
        mask_token="<mask>",
        additional_special_tokens=None,
        sp_model_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs,
    ) -> None:
        """
        Construct a CPM tokenizer. Based on [Jieba](https://pypi.org/project/jieba/) and
        [SentencePiece](https://github.com/google/sentencepiece).

        This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should
        refer to this superclass for more information regarding those methods.

        Args:
            vocab_file (`str`):
                [SentencePiece](https://github.com/google/sentencepiece) file (generally has a .spm extension) that
                contains the vocabulary necessary to instantiate a tokenizer.
            do_lower_case (`bool`, *optional*, defaults to `True`):
                Whether to lowercase the input when tokenizing.
            remove_space (`bool`, *optional*, defaults to `True`):
                Whether to strip the text when tokenizing (removing excess spaces before and after the string).
            keep_accents (`bool`, *optional*, defaults to `False`):
                Whether to keep accents when tokenizing.
            bos_token (`str`, *optional*, defaults to `"<s>"`):
                The beginning of sequence token that was used during pretraining. Can be used a sequence classifier
                token.

                <Tip>

                When building a sequence using special tokens, this is not the token that is used for the beginning of
                sequence. The token used is the `cls_token`.

                </Tip>

            eos_token (`str`, *optional*, defaults to `"</s>"`):
                The end of sequence token.

                <Tip>

                When building a sequence using special tokens, this is not the token that is used for the end of
                sequence. The token used is the `sep_token`.

                </Tip>

            unk_token (`str`, *optional*, defaults to `"<unk>"`):
                The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be
                this token instead.
            sep_token (`str`, *optional*, defaults to `"<sep>"`):
                The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences
                for sequence classification or for a text and a question for question answering. It is also used as the
                last token of a sequence built with special tokens.
            pad_token (`str`, *optional*, defaults to `"<pad>"`):
                The token used for padding, for example when batching sequences of different lengths.
            cls_token (`str`, *optional*, defaults to `"<cls>"`):
                The classifier token which is used when doing sequence classification (classification of the whole
                sequence instead of per-token classification). It is the first token of the sequence when built with
                special tokens.
            mask_token (`str`, *optional*, defaults to `"<mask>"`):
                The token used for masking values. This is the token used when training this model with masked language
                modeling. This is the token which the model will try to predict.
            additional_special_tokens (`List[str]`, *optional*, defaults to `["<eop>", "<eod>"]`):
                Additional special tokens used by the tokenizer.

        Attributes:
            sp_model (`SentencePieceProcessor`):
                The *SentencePiece* processor that is used for every conversion (string, tokens and IDs).
        """
        # Fix: the default used to be the mutable literal ["<eop>", "<eod>"], a single list
        # object shared by every call (and every instance). Use a None sentinel and build a
        # fresh list per call instead; effective behavior is unchanged.
        if additional_special_tokens is None:
            additional_special_tokens = ["<eop>", "<eod>"]
        # Mask token behave like a normal word, i.e. include the space before it
        mask_token = AddedToken(mask_token, lstrip=True, rstrip=False) if isinstance(mask_token, str) else mask_token

        self.sp_model_kwargs = {} if sp_model_kwargs is None else sp_model_kwargs

        super().__init__(
            do_lower_case=do_lower_case,
            remove_space=remove_space,
            keep_accents=keep_accents,
            bos_token=bos_token,
            eos_token=eos_token,
            unk_token=unk_token,
            sep_token=sep_token,
            pad_token=pad_token,
            cls_token=cls_token,
            mask_token=mask_token,
            additional_special_tokens=additional_special_tokens,
            sp_model_kwargs=self.sp_model_kwargs,
            **kwargs,
        )

        self._pad_token_type_id = 3
        self.do_lower_case = do_lower_case
        self.remove_space = remove_space
        self.keep_accents = keep_accents
        self.vocab_file = vocab_file

        self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs)
        self.sp_model.Load(vocab_file)

        try:
            import jieba
        except ModuleNotFoundError as error:
            raise error.__class__(
                "You need to install jieba to use CpmTokenizer or CpmTokenizerFast. "
                "See https://pypi.org/project/jieba/ for installation."
            )
        self.jieba = jieba
        # Encodes " " as U+2582 and "\n" as U+2583 during tokenization; _decode reverses this.
        self.translator = str.maketrans(" \n", "\u2582\u2583")

    @property
    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer.vocab_size
    def vocab_size(self):
        return len(self.sp_model)

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer.get_vocab
    def get_vocab(self):
        vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)}
        vocab.update(self.added_tokens_encoder)
        return vocab

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer.__getstate__
    def __getstate__(self):
        # The SentencePiece processor is not picklable; drop it and reload in __setstate__.
        state = self.__dict__.copy()
        state["sp_model"] = None
        return state

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer.__setstate__
    def __setstate__(self, d):
        self.__dict__ = d

        # for backward compatibility
        if not hasattr(self, "sp_model_kwargs"):
            self.sp_model_kwargs = {}

        self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs)
        self.sp_model.Load(self.vocab_file)

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer.preprocess_text
    def preprocess_text(self, inputs):
        if self.remove_space:
            outputs = " ".join(inputs.strip().split())
        else:
            outputs = inputs
        outputs = outputs.replace("``", '"').replace("''", '"')

        if not self.keep_accents:
            # NFKD-decompose then drop combining marks to strip accents.
            outputs = unicodedata.normalize("NFKD", outputs)
            outputs = "".join([c for c in outputs if not unicodedata.combining(c)])
        if self.do_lower_case:
            outputs = outputs.lower()

        return outputs

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer._tokenize
    def _tokenize(self, text: str) -> List[str]:
        """Tokenize a string."""
        text = self.preprocess_text(text)
        pieces = self.sp_model.encode(text, out_type=str)
        new_pieces = []
        for piece in pieces:
            # Re-segment pieces like "2000," so the digits and the trailing comma split cleanly.
            if len(piece) > 1 and piece[-1] == str(",") and piece[-2].isdigit():
                cur_pieces = self.sp_model.EncodeAsPieces(piece[:-1].replace(SPIECE_UNDERLINE, ""))
                if piece[0] != SPIECE_UNDERLINE and cur_pieces[0][0] == SPIECE_UNDERLINE:
                    if len(cur_pieces[0]) == 1:
                        cur_pieces = cur_pieces[1:]
                    else:
                        cur_pieces[0] = cur_pieces[0][1:]
                cur_pieces.append(piece[-1])
                new_pieces.extend(cur_pieces)
            else:
                new_pieces.append(piece)

        return new_pieces

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer._convert_token_to_id
    def _convert_token_to_id(self, token):
        """Converts a token (str) in an id using the vocab."""
        return self.sp_model.PieceToId(token)

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer._convert_id_to_token
    def _convert_id_to_token(self, index):
        """Converts an index (integer) in a token (str) using the vocab."""
        return self.sp_model.IdToPiece(index)

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer.convert_tokens_to_string
    def convert_tokens_to_string(self, tokens):
        """Converts a sequence of tokens (strings for sub-words) in a single string."""
        out_string = "".join(tokens).replace(SPIECE_UNDERLINE, " ").strip()
        return out_string

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer.build_inputs_with_special_tokens
    def build_inputs_with_special_tokens(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and
        adding special tokens. An XLNet sequence has the following format:

        - single sequence: `X <sep> <cls>`
        - pair of sequences: `A <sep> B <sep> <cls>`

        Args:
            token_ids_0 (`List[int]`):
                List of IDs to which the special tokens will be added.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.

        Returns:
            `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens.
        """
        sep = [self.sep_token_id]
        cls = [self.cls_token_id]
        if token_ids_1 is None:
            return token_ids_0 + sep + cls
        return token_ids_0 + sep + token_ids_1 + sep + cls

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer.get_special_tokens_mask
    def get_special_tokens_mask(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False
    ) -> List[int]:
        """
        Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding
        special tokens using the tokenizer `prepare_for_model` method.

        Args:
            token_ids_0 (`List[int]`):
                List of IDs.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.
            already_has_special_tokens (`bool`, *optional*, defaults to `False`):
                Whether or not the token list is already formatted with special tokens for the model.

        Returns:
            `List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token.
        """
        if already_has_special_tokens:
            return super().get_special_tokens_mask(
                token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True
            )

        if token_ids_1 is not None:
            return ([0] * len(token_ids_0)) + [1] + ([0] * len(token_ids_1)) + [1, 1]
        return ([0] * len(token_ids_0)) + [1, 1]

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer.create_token_type_ids_from_sequences
    def create_token_type_ids_from_sequences(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Create a mask from the two sequences passed to be used in a sequence-pair classification task. An XLNet
        sequence pair mask has the following format:

        ```
        0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 2
        | first sequence    | second sequence |
        ```

        If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s).

        Args:
            token_ids_0 (`List[int]`):
                List of IDs.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.

        Returns:
            `List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s).
        """
        sep = [self.sep_token_id]
        cls_segment_id = [2]

        if token_ids_1 is None:
            return len(token_ids_0 + sep) * [0] + cls_segment_id
        return len(token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1] + cls_segment_id

    # Copied from transformers.models.xlnet.tokenization_xlnet.XLNetTokenizer.save_vocabulary
    def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
        if not os.path.isdir(save_directory):
            logger.error(f"Vocabulary path ({save_directory}) should be a directory")
            return
        out_vocab_file = os.path.join(
            save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"]
        )

        if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file) and os.path.isfile(self.vocab_file):
            copyfile(self.vocab_file, out_vocab_file)
        elif not os.path.isfile(self.vocab_file):
            with open(out_vocab_file, "wb") as fi:
                content_spiece_model = self.sp_model.serialized_model_proto()
                fi.write(content_spiece_model)

        return (out_vocab_file,)

    def _decode(self, *args, **kwargs):
        # Undo the CPM whitespace encoding: drop literal spaces, then map the placeholder
        # characters back to " " (U+2582) and "\n" (U+2583) — the inverse of self.translator.
        text = super()._decode(*args, **kwargs)
        text = text.replace(" ", "").replace("\u2582", " ").replace("\u2583", "\n")
        return text
27182812/ChatGLM-LLaMA-chinese-insturct
10,680
src/transformers/models/cpm/tokenization_cpm_fast.py
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenization classes."""
import os
from shutil import copyfile
from typing import List, Optional, Tuple

from ...tokenization_utils_fast import AddedToken, PreTrainedTokenizerFast
from ...utils import logging


logger = logging.get_logger(__name__)

VOCAB_FILES_NAMES = {"vocab_file": "spiece.model", "tokenizer_file": "tokenizer.json"}

PRETRAINED_VOCAB_FILES_MAP = {
    "vocab_file": {
        "TsinghuaAI/CPM-Generate": "https://huggingface.co/TsinghuaAI/CPM-Generate/resolve/main/spiece.model",
    },
    "tokenizer_file": {
        "TsinghuaAI/CPM-Generate": "https://huggingface.co/TsinghuaAI/CPM-Generate/resolve/main/tokenizer.json",
    },
}


class CpmTokenizerFast(PreTrainedTokenizerFast):
    """Runs pre-tokenization with Jieba segmentation tool. It is used in CPM models."""

    def __init__(
        self,
        vocab_file=None,
        tokenizer_file=None,
        do_lower_case=False,
        remove_space=True,
        keep_accents=False,
        bos_token="<s>",
        eos_token="</s>",
        unk_token="<unk>",
        sep_token="<sep>",
        pad_token="<pad>",
        cls_token="<cls>",
        mask_token="<mask>",
        additional_special_tokens=["<eop>", "<eod>"],
        **kwargs,
    ):
        """
        Construct a CPM tokenizer. Based on [Jieba](https://pypi.org/project/jieba/) and
        [SentencePiece](https://github.com/google/sentencepiece).

        This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should
        refer to this superclass for more information regarding those methods.

        Args:
            vocab_file (`str`):
                [SentencePiece](https://github.com/google/sentencepiece) file (generally has a .spm extension) that
                contains the vocabulary necessary to instantiate a tokenizer.
            do_lower_case (`bool`, *optional*, defaults to `False`):
                Whether to lowercase the input when tokenizing.
            remove_space (`bool`, *optional*, defaults to `True`):
                Whether to strip the text when tokenizing (removing excess spaces before and after the string).
            keep_accents (`bool`, *optional*, defaults to `False`):
                Whether to keep accents when tokenizing.
            bos_token (`str`, *optional*, defaults to `"<s>"`):
                The beginning of sequence token that was used during pretraining. Can be used a sequence classifier
                token.

                <Tip>

                When building a sequence using special tokens, this is not the token that is used for the beginning of
                sequence. The token used is the `cls_token`.

                </Tip>

            eos_token (`str`, *optional*, defaults to `"</s>"`):
                The end of sequence token.

                <Tip>

                When building a sequence using special tokens, this is not the token that is used for the end of
                sequence. The token used is the `sep_token`.

                </Tip>

            unk_token (`str`, *optional*, defaults to `"<unk>"`):
                The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be
                this token instead.
            sep_token (`str`, *optional*, defaults to `"<sep>"`):
                The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences
                for sequence classification or for a text and a question for question answering. It is also used as the
                last token of a sequence built with special tokens.
            pad_token (`str`, *optional*, defaults to `"<pad>"`):
                The token used for padding, for example when batching sequences of different lengths.
            cls_token (`str`, *optional*, defaults to `"<cls>"`):
                The classifier token which is used when doing sequence classification (classification of the whole
                sequence instead of per-token classification). It is the first token of the sequence when built with
                special tokens.
            mask_token (`str`, *optional*, defaults to `"<mask>"`):
                The token used for masking values. This is the token used when training this model with masked language
                modeling. This is the token which the model will try to predict.
            additional_special_tokens (`List[str]`, *optional*, defaults to `["<eop>", "<eod>"]`):
                Additional special tokens used by the tokenizer.

        Attributes:
            sp_model (`SentencePieceProcessor`):
                The *SentencePiece* processor that is used for every conversion (string, tokens and IDs).
        """
        # Mask token behaves like a normal word, i.e. include the space before it.
        mask_token = AddedToken(mask_token, lstrip=True, rstrip=False) if isinstance(mask_token, str) else mask_token
        super().__init__(
            vocab_file=vocab_file,
            tokenizer_file=tokenizer_file,
            do_lower_case=do_lower_case,
            remove_space=remove_space,
            keep_accents=keep_accents,
            bos_token=bos_token,
            eos_token=eos_token,
            unk_token=unk_token,
            sep_token=sep_token,
            pad_token=pad_token,
            cls_token=cls_token,
            mask_token=mask_token,
            additional_special_tokens=additional_special_tokens,
            **kwargs,
        )

        # Padding positions get token-type id 3 (XLNet-style segment layout).
        self._pad_token_type_id = 3
        self.do_lower_case = do_lower_case
        self.remove_space = remove_space
        self.keep_accents = keep_accents
        self.vocab_file = vocab_file
        # The slow-tokenizer vocab can only be re-saved if a spiece.model file was provided.
        self.can_save_slow_tokenizer = False if not self.vocab_file else True

        # jieba is an optional dependency; fail with an actionable message if missing.
        try:
            import jieba
        except ModuleNotFoundError as error:
            raise error.__class__(
                "You need to install jieba to use CpmTokenizer or CpmTokenizerFast. "
                "See https://pypi.org/project/jieba/ for installation."
            )
        self.jieba = jieba
        # Map space/newline to placeholder chars so they survive the backend tokenizer.
        self.translator = str.maketrans(" \n", "\u2582\u2583")

    # Copied from transformers.models.xlnet.tokenization_xlnet_fast.XLNetTokenizerFast.build_inputs_with_special_tokens
    def build_inputs_with_special_tokens(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and
        adding special tokens. An XLNet sequence has the following format:

        - single sequence: `X <sep> <cls>`
        - pair of sequences: `A <sep> B <sep> <cls>`

        Args:
            token_ids_0 (`List[int]`):
                List of IDs to which the special tokens will be added.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.

        Returns:
            `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens.
        """
        sep = [self.sep_token_id]
        cls = [self.cls_token_id]
        if token_ids_1 is None:
            return token_ids_0 + sep + cls
        return token_ids_0 + sep + token_ids_1 + sep + cls

    # Copied from transformers.models.xlnet.tokenization_xlnet_fast.XLNetTokenizerFast.create_token_type_ids_from_sequences
    def create_token_type_ids_from_sequences(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Create a mask from the two sequences passed to be used in a sequence-pair classification task. An XLNet
        sequence pair mask has the following format:

        ```
        0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1
        | first sequence    | second sequence |
        ```

        If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s).

        Args:
            token_ids_0 (`List[int]`):
                List of IDs.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.

        Returns:
            `List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s).
        """
        sep = [self.sep_token_id]
        # The trailing <cls> token gets its own segment id (2).
        cls_segment_id = [2]

        if token_ids_1 is None:
            return len(token_ids_0 + sep) * [0] + cls_segment_id
        return len(token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1] + cls_segment_id

    # Copied from transformers.models.xlnet.tokenization_xlnet_fast.XLNetTokenizerFast.save_vocabulary
    def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
        if not self.can_save_slow_tokenizer:
            raise ValueError(
                "Your fast tokenizer does not have the necessary information to save the vocabulary for a slow "
                "tokenizer."
            )

        if not os.path.isdir(save_directory):
            logger.error(f"Vocabulary path ({save_directory}) should be a directory")
            return
        out_vocab_file = os.path.join(
            save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"]
        )

        # Only copy when the source and destination differ (avoid copying a file onto itself).
        if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file):
            copyfile(self.vocab_file, out_vocab_file)

        return (out_vocab_file,)

    def _batch_encode_plus(self, batch_text_or_text_pairs, *args, **kwargs):
        # Pre-segment every input with Jieba and replace " "/"\n" with the
        # placeholder characters from `self.translator`, then delegate to the
        # fast-tokenizer backend.
        batch_text_or_text_pairs = [
            " ".join([x.translate(self.translator) for x in self.jieba.cut(text, cut_all=False)])
            for text in batch_text_or_text_pairs
        ]
        return super()._batch_encode_plus(batch_text_or_text_pairs, *args, **kwargs)

    def _decode(self, *args, **kwargs):
        # Reverse the encoding-time mapping: drop the Jieba joining spaces and
        # restore the placeholder characters back to " " and "\n".
        text = super()._decode(*args, **kwargs)
        text = text.replace(" ", "").replace("\u2582", " ").replace("\u2583", "\n")
        return text
27182812/ChatGLM-LLaMA-chinese-insturct
4,069
src/transformers/models/convbert/__init__.py
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


# Objects that are always importable, regardless of which backends are installed.
_import_structure = {
    "configuration_convbert": ["CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "ConvBertConfig", "ConvBertOnnxConfig"],
    "tokenization_convbert": ["ConvBertTokenizer"],
}

# The fast tokenizer needs the optional `tokenizers` backend.
try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_convbert_fast"] = ["ConvBertTokenizerFast"]

# PyTorch model classes, exposed only when torch is installed.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_convbert"] = [
        "CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "ConvBertForMaskedLM",
        "ConvBertForMultipleChoice",
        "ConvBertForQuestionAnswering",
        "ConvBertForSequenceClassification",
        "ConvBertForTokenClassification",
        "ConvBertLayer",
        "ConvBertModel",
        "ConvBertPreTrainedModel",
        "load_tf_weights_in_convbert",
    ]

# TensorFlow model classes, exposed only when TF is installed.
try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_tf_convbert"] = [
        "TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFConvBertForMaskedLM",
        "TFConvBertForMultipleChoice",
        "TFConvBertForQuestionAnswering",
        "TFConvBertForSequenceClassification",
        "TFConvBertForTokenClassification",
        "TFConvBertLayer",
        "TFConvBertModel",
        "TFConvBertPreTrainedModel",
    ]


# Static imports for type checkers; at runtime the module is replaced by a
# _LazyModule that defers the actual imports until first attribute access.
if TYPE_CHECKING:
    from .configuration_convbert import CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, ConvBertConfig, ConvBertOnnxConfig
    from .tokenization_convbert import ConvBertTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_convbert_fast import ConvBertTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_convbert import (
            CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            ConvBertForMaskedLM,
            ConvBertForMultipleChoice,
            ConvBertForQuestionAnswering,
            ConvBertForSequenceClassification,
            ConvBertForTokenClassification,
            ConvBertLayer,
            ConvBertModel,
            ConvBertPreTrainedModel,
            load_tf_weights_in_convbert,
        )

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_convbert import (
            TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFConvBertForMaskedLM,
            TFConvBertForMultipleChoice,
            TFConvBertForQuestionAnswering,
            TFConvBertForSequenceClassification,
            TFConvBertForTokenClassification,
            TFConvBertLayer,
            TFConvBertModel,
            TFConvBertPreTrainedModel,
        )

else:
    import sys

    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
27182812/ChatGLM-LLaMA-chinese-insturct
2,108
src/transformers/models/convbert/convert_convbert_original_tf1_checkpoint_to_pytorch_and_tf2.py
# coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert ConvBERT checkpoint."""

import argparse

from transformers import ConvBertConfig, ConvBertModel, TFConvBertModel, load_tf_weights_in_convbert
from transformers.utils import logging


logging.set_verbosity_info()


def convert_orig_tf1_checkpoint_to_pytorch(tf_checkpoint_path, convbert_config_file, pytorch_dump_path):
    """Convert an original TF1 ConvBERT checkpoint into PyTorch and TF2 formats.

    Builds a PyTorch `ConvBertModel` from the JSON config, loads the TF1 weights
    into it, saves it at `pytorch_dump_path`, then round-trips the saved PyTorch
    weights through `TFConvBertModel` to emit a TF2 checkpoint in the same folder.
    """
    # Instantiate a PyTorch model from the architecture config and fill it
    # with the weights found in the original TF1 checkpoint.
    config = ConvBertConfig.from_json_file(convbert_config_file)
    pt_model = load_tf_weights_in_convbert(ConvBertModel(config), config, tf_checkpoint_path)
    pt_model.save_pretrained(pytorch_dump_path)

    # Re-export the freshly saved PyTorch weights as a TF2 checkpoint.
    tf_model = TFConvBertModel.from_pretrained(pytorch_dump_path, from_pt=True)
    tf_model.save_pretrained(pytorch_dump_path)


if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser()
    # Required parameters
    arg_parser.add_argument(
        "--tf_checkpoint_path", default=None, type=str, required=True, help="Path to the TensorFlow checkpoint path."
    )
    arg_parser.add_argument(
        "--convbert_config_file",
        default=None,
        type=str,
        required=True,
        help=(
            "The config json file corresponding to the pre-trained ConvBERT model. \n"
            "This specifies the model architecture."
        ),
    )
    arg_parser.add_argument(
        "--pytorch_dump_path", default=None, type=str, required=True, help="Path to the output PyTorch model."
    )
    cli_args = arg_parser.parse_args()
    convert_orig_tf1_checkpoint_to_pytorch(
        cli_args.tf_checkpoint_path, cli_args.convbert_config_file, cli_args.pytorch_dump_path
    )
27182812/ChatGLM-LLaMA-chinese-insturct
8,765
src/transformers/models/convbert/tokenization_convbert_fast.py
# coding=utf-8
# Copyright The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenization classes for ConvBERT."""
import json
from typing import List, Optional, Tuple

from tokenizers import normalizers

from ...tokenization_utils_fast import PreTrainedTokenizerFast
from ...utils import logging
from .tokenization_convbert import ConvBertTokenizer


logger = logging.get_logger(__name__)

VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt"}

PRETRAINED_VOCAB_FILES_MAP = {
    "vocab_file": {
        "YituTech/conv-bert-base": "https://huggingface.co/YituTech/conv-bert-base/resolve/main/vocab.txt",
        "YituTech/conv-bert-medium-small": (
            "https://huggingface.co/YituTech/conv-bert-medium-small/resolve/main/vocab.txt"
        ),
        "YituTech/conv-bert-small": "https://huggingface.co/YituTech/conv-bert-small/resolve/main/vocab.txt",
    }
}

PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
    "YituTech/conv-bert-base": 512,
    "YituTech/conv-bert-medium-small": 512,
    "YituTech/conv-bert-small": 512,
}

PRETRAINED_INIT_CONFIGURATION = {
    "YituTech/conv-bert-base": {"do_lower_case": True},
    "YituTech/conv-bert-medium-small": {"do_lower_case": True},
    "YituTech/conv-bert-small": {"do_lower_case": True},
}


# Copied from transformers.models.bert.tokenization_bert_fast.BertTokenizerFast with bert-base-cased->YituTech/conv-bert-base, Bert->ConvBert, BERT->ConvBERT
class ConvBertTokenizerFast(PreTrainedTokenizerFast):
    r"""
    Construct a "fast" ConvBERT tokenizer (backed by HuggingFace's *tokenizers* library). Based on WordPiece.

    This tokenizer inherits from [`PreTrainedTokenizerFast`] which contains most of the main methods. Users should
    refer to this superclass for more information regarding those methods.

    Args:
        vocab_file (`str`):
            File containing the vocabulary.
        do_lower_case (`bool`, *optional*, defaults to `True`):
            Whether or not to lowercase the input when tokenizing.
        unk_token (`str`, *optional*, defaults to `"[UNK]"`):
            The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
            token instead.
        sep_token (`str`, *optional*, defaults to `"[SEP]"`):
            The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for
            sequence classification or for a text and a question for question answering. It is also used as the last
            token of a sequence built with special tokens.
        pad_token (`str`, *optional*, defaults to `"[PAD]"`):
            The token used for padding, for example when batching sequences of different lengths.
        cls_token (`str`, *optional*, defaults to `"[CLS]"`):
            The classifier token which is used when doing sequence classification (classification of the whole sequence
            instead of per-token classification). It is the first token of the sequence when built with special tokens.
        mask_token (`str`, *optional*, defaults to `"[MASK]"`):
            The token used for masking values. This is the token used when training this model with masked language
            modeling. This is the token which the model will try to predict.
        clean_text (`bool`, *optional*, defaults to `True`):
            Whether or not to clean the text before tokenization by removing any control characters and replacing all
            whitespaces by the classic one.
        tokenize_chinese_chars (`bool`, *optional*, defaults to `True`):
            Whether or not to tokenize Chinese characters. This should likely be deactivated for Japanese (see [this
            issue](https://github.com/huggingface/transformers/issues/328)).
        strip_accents (`bool`, *optional*):
            Whether or not to strip all accents. If this option is not specified, then it will be determined by the
            value for `lowercase` (as in the original ConvBERT).
        wordpieces_prefix (`str`, *optional*, defaults to `"##"`):
            The prefix for subwords.
    """

    vocab_files_names = VOCAB_FILES_NAMES
    pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
    pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION
    max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
    slow_tokenizer_class = ConvBertTokenizer

    def __init__(
        self,
        vocab_file=None,
        tokenizer_file=None,
        do_lower_case=True,
        unk_token="[UNK]",
        sep_token="[SEP]",
        pad_token="[PAD]",
        cls_token="[CLS]",
        mask_token="[MASK]",
        tokenize_chinese_chars=True,
        strip_accents=None,
        **kwargs,
    ):
        super().__init__(
            vocab_file,
            tokenizer_file=tokenizer_file,
            do_lower_case=do_lower_case,
            unk_token=unk_token,
            sep_token=sep_token,
            pad_token=pad_token,
            cls_token=cls_token,
            mask_token=mask_token,
            tokenize_chinese_chars=tokenize_chinese_chars,
            strip_accents=strip_accents,
            **kwargs,
        )

        # If the serialized tokenizer.json normalizer disagrees with the
        # arguments passed here, rebuild the backend normalizer so that the
        # explicit arguments win.
        normalizer_state = json.loads(self.backend_tokenizer.normalizer.__getstate__())
        if (
            normalizer_state.get("lowercase", do_lower_case) != do_lower_case
            or normalizer_state.get("strip_accents", strip_accents) != strip_accents
            or normalizer_state.get("handle_chinese_chars", tokenize_chinese_chars) != tokenize_chinese_chars
        ):
            normalizer_class = getattr(normalizers, normalizer_state.pop("type"))
            normalizer_state["lowercase"] = do_lower_case
            normalizer_state["strip_accents"] = strip_accents
            normalizer_state["handle_chinese_chars"] = tokenize_chinese_chars
            self.backend_tokenizer.normalizer = normalizer_class(**normalizer_state)

        self.do_lower_case = do_lower_case

    def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
        """
        Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and
        adding special tokens. A ConvBERT sequence has the following format:

        - single sequence: `[CLS] X [SEP]`
        - pair of sequences: `[CLS] A [SEP] B [SEP]`

        Args:
            token_ids_0 (`List[int]`):
                List of IDs to which the special tokens will be added.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.

        Returns:
            `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens.
        """
        output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]

        # NOTE(review): truthiness check — an empty `token_ids_1` list is treated
        # the same as None here (no second segment appended).
        if token_ids_1:
            output += token_ids_1 + [self.sep_token_id]

        return output

    def create_token_type_ids_from_sequences(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Create a mask from the two sequences passed to be used in a sequence-pair classification task. A ConvBERT
        sequence pair mask has the following format:

        ```
        0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1
        | first sequence    | second sequence |
        ```

        If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s).

        Args:
            token_ids_0 (`List[int]`):
                List of IDs.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.

        Returns:
            `List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s).
        """
        sep = [self.sep_token_id]
        cls = [self.cls_token_id]
        if token_ids_1 is None:
            return len(cls + token_ids_0 + sep) * [0]
        return len(cls + token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1]

    def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
        # Delegate to the backend tokenizer's model, which writes vocab.txt.
        files = self._tokenizer.model.save(save_directory, name=filename_prefix)
        return tuple(files)
27182812/ChatGLM-LLaMA-chinese-insturct
56,248
src/transformers/models/convbert/modeling_tf_convbert.py
# coding=utf-8 # Copyright 2021 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ TF 2.0 ConvBERT model.""" from typing import Optional, Tuple, Union import numpy as np import tensorflow as tf from ...activations_tf import get_tf_activation from ...modeling_tf_outputs import ( TFBaseModelOutput, TFMaskedLMOutput, TFMultipleChoiceModelOutput, TFQuestionAnsweringModelOutput, TFSequenceClassifierOutput, TFTokenClassifierOutput, ) from ...modeling_tf_utils import ( TFMaskedLanguageModelingLoss, TFModelInputType, TFMultipleChoiceLoss, TFPreTrainedModel, TFQuestionAnsweringLoss, TFSequenceClassificationLoss, TFSequenceSummary, TFTokenClassificationLoss, get_initializer, keras_serializable, unpack_inputs, ) from ...tf_utils import shape_list, stable_softmax from ...utils import ( MULTIPLE_CHOICE_DUMMY_INPUTS, add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, ) from .configuration_convbert import ConvBertConfig logger = logging.get_logger(__name__) _CHECKPOINT_FOR_DOC = "YituTech/conv-bert-base" _CONFIG_FOR_DOC = "ConvBertConfig" TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [ "YituTech/conv-bert-base", "YituTech/conv-bert-medium-small", "YituTech/conv-bert-small", # See all ConvBERT models at https://huggingface.co/models?filter=convbert ] # Copied from transformers.models.albert.modeling_tf_albert.TFAlbertEmbeddings with Albert->ConvBert class TFConvBertEmbeddings(tf.keras.layers.Layer): 
"""Construct the embeddings from word, position and token_type embeddings.""" def __init__(self, config: ConvBertConfig, **kwargs): super().__init__(**kwargs) self.config = config self.embedding_size = config.embedding_size self.max_position_embeddings = config.max_position_embeddings self.initializer_range = config.initializer_range self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm") self.dropout = tf.keras.layers.Dropout(rate=config.hidden_dropout_prob) def build(self, input_shape: tf.TensorShape): with tf.name_scope("word_embeddings"): self.weight = self.add_weight( name="weight", shape=[self.config.vocab_size, self.embedding_size], initializer=get_initializer(self.initializer_range), ) with tf.name_scope("token_type_embeddings"): self.token_type_embeddings = self.add_weight( name="embeddings", shape=[self.config.type_vocab_size, self.embedding_size], initializer=get_initializer(self.initializer_range), ) with tf.name_scope("position_embeddings"): self.position_embeddings = self.add_weight( name="embeddings", shape=[self.max_position_embeddings, self.embedding_size], initializer=get_initializer(self.initializer_range), ) super().build(input_shape) # Copied from transformers.models.bert.modeling_tf_bert.TFBertEmbeddings.call def call( self, input_ids: tf.Tensor = None, position_ids: tf.Tensor = None, token_type_ids: tf.Tensor = None, inputs_embeds: tf.Tensor = None, past_key_values_length=0, training: bool = False, ) -> tf.Tensor: """ Applies embedding based on inputs tensor. Returns: final_embeddings (`tf.Tensor`): output embedding tensor. """ if input_ids is None and inputs_embeds is None: raise ValueError("Need to provide either `input_ids` or `input_embeds`.") if input_ids is not None: # Note: tf.gather, on which the embedding layer is based, won't check positive out of bound # indices on GPU, returning zeros instead. This is a dangerous silent behavior. 
        # NOTE(review): tail of TFConvBertEmbeddings.call — the start of this
        # method lies outside this chunk.
        # Guard: every input id must index into the embedding matrix.
        tf.debugging.assert_less(
            input_ids,
            tf.cast(self.config.vocab_size, dtype=input_ids.dtype),
            message=(
                "input_ids must be smaller than the embedding layer's input dimension (got"
                f" {tf.math.reduce_max(input_ids)} >= {self.config.vocab_size})"
            ),
        )
        inputs_embeds = tf.gather(params=self.weight, indices=input_ids)

        input_shape = shape_list(inputs_embeds)[:-1]

        if token_type_ids is None:
            token_type_ids = tf.fill(dims=input_shape, value=0)

        if position_ids is None:
            # Positions continue after any cached keys/values (past_key_values_length).
            position_ids = tf.expand_dims(
                tf.range(start=past_key_values_length, limit=input_shape[1] + past_key_values_length), axis=0
            )

        position_embeds = tf.gather(params=self.position_embeddings, indices=position_ids)
        token_type_embeds = tf.gather(params=self.token_type_embeddings, indices=token_type_ids)
        # Sum the three embedding types, then layer-normalize and apply dropout.
        final_embeddings = inputs_embeds + position_embeds + token_type_embeds
        final_embeddings = self.LayerNorm(inputs=final_embeddings)
        final_embeddings = self.dropout(inputs=final_embeddings, training=training)

        return final_embeddings


class TFConvBertSelfAttention(tf.keras.layers.Layer):
    # Mixed attention for ConvBERT: standard self-attention heads plus
    # convolution-based heads; config.head_ratio controls how many plain
    # attention heads remain.
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        if config.hidden_size % config.num_attention_heads != 0:
            raise ValueError(
                f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention "
                f"heads ({config.num_attention_heads})"
            )

        # Reduce attention heads by head_ratio; keep at least one head.
        new_num_attention_heads = int(config.num_attention_heads / config.head_ratio)
        if new_num_attention_heads < 1:
            self.head_ratio = config.num_attention_heads
            num_attention_heads = 1
        else:
            num_attention_heads = new_num_attention_heads
            self.head_ratio = config.head_ratio

        self.num_attention_heads = num_attention_heads
        self.conv_kernel_size = config.conv_kernel_size

        if config.hidden_size % self.num_attention_heads != 0:
            raise ValueError("hidden_size should be divisible by num_attention_heads")

        self.attention_head_size = config.hidden_size // config.num_attention_heads
        self.all_head_size = self.num_attention_heads * self.attention_head_size
        self.query = tf.keras.layers.Dense(
            self.all_head_size, kernel_initializer=get_initializer(config.initializer_range), name="query"
        )
        self.key = tf.keras.layers.Dense(
            self.all_head_size, kernel_initializer=get_initializer(config.initializer_range), name="key"
        )
        self.value = tf.keras.layers.Dense(
            self.all_head_size, kernel_initializer=get_initializer(config.initializer_range), name="value"
        )

        # Depthwise-separable convolution producing convolution-aware key features.
        self.key_conv_attn_layer = tf.keras.layers.SeparableConv1D(
            self.all_head_size,
            self.conv_kernel_size,
            padding="same",
            activation=None,
            depthwise_initializer=get_initializer(1 / self.conv_kernel_size),
            pointwise_initializer=get_initializer(config.initializer_range),
            name="key_conv_attn_layer",
        )

        # Produces one dynamic kernel of size conv_kernel_size per head/position.
        self.conv_kernel_layer = tf.keras.layers.Dense(
            self.num_attention_heads * self.conv_kernel_size,
            activation=None,
            name="conv_kernel_layer",
            kernel_initializer=get_initializer(config.initializer_range),
        )

        self.conv_out_layer = tf.keras.layers.Dense(
            self.all_head_size,
            activation=None,
            name="conv_out_layer",
            kernel_initializer=get_initializer(config.initializer_range),
        )

        self.dropout = tf.keras.layers.Dropout(config.attention_probs_dropout_prob)

    def transpose_for_scores(self, x, batch_size):
        # Reshape from [batch_size, seq_length, all_head_size] to [batch_size, seq_length, num_attention_heads, attention_head_size]
        x = tf.reshape(x, (batch_size, -1, self.num_attention_heads, self.attention_head_size))
        return tf.transpose(x, perm=[0, 2, 1, 3])

    def call(self, hidden_states, attention_mask, head_mask, output_attentions, training=False):
        batch_size = shape_list(hidden_states)[0]
        mixed_query_layer = self.query(hidden_states)
        mixed_key_layer = self.key(hidden_states)
        mixed_value_layer = self.value(hidden_states)

        mixed_key_conv_attn_layer = self.key_conv_attn_layer(hidden_states)

        query_layer = self.transpose_for_scores(mixed_query_layer, batch_size)
        key_layer = self.transpose_for_scores(mixed_key_layer, batch_size)
        # Element-wise gate between the conv key features and the query.
        conv_attn_layer = tf.multiply(mixed_key_conv_attn_layer, mixed_query_layer)

        # Per-position dynamic convolution kernels, softmax-normalized over the
        # kernel window.
        conv_kernel_layer = self.conv_kernel_layer(conv_attn_layer)
        conv_kernel_layer = tf.reshape(conv_kernel_layer, [-1, self.conv_kernel_size, 1])
        conv_kernel_layer = stable_softmax(conv_kernel_layer, axis=1)

        # Symmetric zero-padding of (kernel-1)/2 on the sequence axis so each
        # position has a full window.
        paddings = tf.constant(
            [
                [
                    0,
                    0,
                ],
                [int((self.conv_kernel_size - 1) / 2), int((self.conv_kernel_size - 1) / 2)],
                [0, 0],
            ]
        )

        conv_out_layer = self.conv_out_layer(hidden_states)
        conv_out_layer = tf.reshape(conv_out_layer, [batch_size, -1, self.all_head_size])
        conv_out_layer = tf.pad(conv_out_layer, paddings, "CONSTANT")

        # Unfold sliding windows of size conv_kernel_size around each position.
        unfold_conv_out_layer = tf.stack(
            [
                tf.slice(conv_out_layer, [0, i, 0], [batch_size, shape_list(mixed_query_layer)[1], self.all_head_size])
                for i in range(self.conv_kernel_size)
            ],
            axis=-1,
        )

        conv_out_layer = tf.reshape(unfold_conv_out_layer, [-1, self.attention_head_size, self.conv_kernel_size])

        # Apply the per-position dynamic kernel to its unfolded window.
        conv_out_layer = tf.matmul(conv_out_layer, conv_kernel_layer)
        conv_out_layer = tf.reshape(conv_out_layer, [-1, self.all_head_size])

        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = tf.matmul(
            query_layer, key_layer, transpose_b=True
        )  # (batch size, num_heads, seq_len_q, seq_len_k)
        dk = tf.cast(shape_list(key_layer)[-1], attention_scores.dtype)  # scale attention_scores
        attention_scores = attention_scores / tf.math.sqrt(dk)

        if attention_mask is not None:
            # Apply the attention mask is (precomputed for all layers in TFBertModel call() function)
            attention_scores = attention_scores + attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = stable_softmax(attention_scores, axis=-1)

        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs, training=training)

        # Mask heads if we want to
        if head_mask is not None:
            attention_probs = attention_probs * head_mask

        value_layer = tf.reshape(
            mixed_value_layer, [batch_size, -1, self.num_attention_heads, self.attention_head_size]
        )
        value_layer = tf.transpose(value_layer, [0, 2, 1, 3])

        context_layer = tf.matmul(attention_probs, value_layer)
        context_layer = tf.transpose(context_layer, perm=[0, 2, 1, 3])

        # Concatenate the attention output with the dynamic-convolution output
        # along the head axis, then flatten heads back into the feature axis.
        conv_out = tf.reshape(conv_out_layer, [batch_size, -1, self.num_attention_heads, self.attention_head_size])
        context_layer = tf.concat([context_layer, conv_out], 2)
        context_layer = tf.reshape(
            context_layer, (batch_size, -1, self.head_ratio * self.all_head_size)
        )  # (batch_size, seq_len_q, all_head_size)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)

        return outputs


class TFConvBertSelfOutput(tf.keras.layers.Layer):
    # Projection + dropout + residual LayerNorm applied after self-attention.
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.dense = tf.keras.layers.Dense(
            config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), name="dense"
        )
        self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
        self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)

    def call(self, hidden_states, input_tensor, training=False):
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states, training=training)
        # Residual connection around the attention block.
        hidden_states = self.LayerNorm(hidden_states + input_tensor)

        return hidden_states


class TFConvBertAttention(tf.keras.layers.Layer):
    # Bundles the self-attention layer with its output projection.
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.self_attention = TFConvBertSelfAttention(config, name="self")
        self.dense_output = TFConvBertSelfOutput(config, name="output")

    def prune_heads(self, heads):
        # Head pruning is not supported for this model.
        raise NotImplementedError

    def call(self, input_tensor, attention_mask, head_mask, output_attentions, training=False):
        self_outputs = self.self_attention(
            input_tensor, attention_mask, head_mask, output_attentions, training=training
        )
        attention_output = self.dense_output(self_outputs[0], input_tensor, training=training)
        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them

        return outputs


class GroupedLinearLayer(tf.keras.layers.Layer):
    # Block-diagonal linear layer: splits the feature axis into num_groups
    # groups and applies an independent linear map per group.
    def __init__(self, input_size, output_size, num_groups, kernel_initializer, **kwargs):
        super().__init__(**kwargs)
        self.input_size = input_size
        self.output_size = output_size
        self.num_groups = num_groups
        self.kernel_initializer = kernel_initializer
        self.group_in_dim = self.input_size // self.num_groups
        self.group_out_dim = self.output_size // self.num_groups

    def build(self, input_shape):
        self.kernel = self.add_weight(
            "kernel",
            shape=[self.group_out_dim, self.group_in_dim, self.num_groups],
            initializer=self.kernel_initializer,
            trainable=True,
        )

        self.bias = self.add_weight(
            "bias", shape=[self.output_size], initializer=self.kernel_initializer, dtype=self.dtype, trainable=True
        )

    def call(self, hidden_states):
        batch_size = shape_list(hidden_states)[0]
        # Move the group axis first so each group is matmul'ed with its own kernel.
        x = tf.transpose(tf.reshape(hidden_states, [-1, self.num_groups, self.group_in_dim]), [1, 0, 2])
        x = tf.matmul(x, tf.transpose(self.kernel, [2, 1, 0]))
        x = tf.transpose(x, [1, 0, 2])
        x = tf.reshape(x, [batch_size, -1, self.output_size])
        x = tf.nn.bias_add(value=x, bias=self.bias)
        return x


class TFConvBertIntermediate(tf.keras.layers.Layer):
    # Feed-forward expansion; uses GroupedLinearLayer when config.num_groups > 1.
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        if config.num_groups == 1:
            self.dense = tf.keras.layers.Dense(
                config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), name="dense"
            )
        else:
            self.dense = GroupedLinearLayer(
                config.hidden_size,
                config.intermediate_size,
                num_groups=config.num_groups,
                kernel_initializer=get_initializer(config.initializer_range),
                name="dense",
            )

        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = get_tf_activation(config.hidden_act)
        else:
            self.intermediate_act_fn = config.hidden_act

    def call(self, hidden_states):
        hidden_states = self.dense(hidden_states)
        hidden_states = self.intermediate_act_fn(hidden_states)

        return hidden_states


class TFConvBertOutput(tf.keras.layers.Layer):
    # Feed-forward projection back to hidden_size with residual LayerNorm.
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        if config.num_groups == 1:
            self.dense = tf.keras.layers.Dense(
                config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), name="dense"
            )
        else:
            self.dense = GroupedLinearLayer(
                config.intermediate_size,
                config.hidden_size,
                num_groups=config.num_groups,
                kernel_initializer=get_initializer(config.initializer_range),
                name="dense",
            )
        self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
        self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)

    def call(self, hidden_states, input_tensor, training=False):
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)

        return hidden_states


class TFConvBertLayer(tf.keras.layers.Layer):
    # One transformer block: attention -> intermediate -> output.
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.attention = TFConvBertAttention(config, name="attention")
        self.intermediate = TFConvBertIntermediate(config, name="intermediate")
        self.bert_output = TFConvBertOutput(config, name="output")

    def call(self, hidden_states, attention_mask, head_mask, output_attentions, training=False):
        attention_outputs = self.attention(
            hidden_states, attention_mask, head_mask, output_attentions, training=training
        )
        attention_output = attention_outputs[0]
        intermediate_output = self.intermediate(attention_output)
        layer_output = self.bert_output(intermediate_output, attention_output, training=training)
        outputs = (layer_output,) + attention_outputs[1:]  # add attentions if we output them

        return outputs


class TFConvBertEncoder(tf.keras.layers.Layer):
    # Stack of config.num_hidden_layers TFConvBertLayer blocks.
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.layer = [TFConvBertLayer(config, name=f"layer_._{i}") for i in range(config.num_hidden_layers)]

    def call(
        self,
        hidden_states,
        attention_mask,
        head_mask,
        output_attentions,
        output_hidden_states,
        return_dict,
        training=False,
    ):
        all_hidden_states = () if output_hidden_states else None
        all_attentions = () if output_attentions else None

        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_outputs = layer_module(
                hidden_states, attention_mask, head_mask[i], output_attentions, training=training
            )
            hidden_states = layer_outputs[0]

            if output_attentions:
                all_attentions = all_attentions + (layer_outputs[1],)

        # Add last layer
        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        if not return_dict:
            return tuple(v for v in [hidden_states, all_hidden_states, all_attentions] if v is not None)

        return TFBaseModelOutput(
            last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_attentions
        )


class TFConvBertPredictionHeadTransform(tf.keras.layers.Layer):
    # Dense -> activation -> LayerNorm transform used by prediction heads.
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.dense = tf.keras.layers.Dense(
            config.embedding_size, kernel_initializer=get_initializer(config.initializer_range), name="dense"
        )

        if isinstance(config.hidden_act, str):
            self.transform_act_fn = get_tf_activation(config.hidden_act)
        else:
            self.transform_act_fn = config.hidden_act

        self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")

    def call(self, hidden_states):
        hidden_states = self.dense(hidden_states)
        hidden_states = self.transform_act_fn(hidden_states)
        hidden_states = self.LayerNorm(hidden_states)

        return hidden_states


@keras_serializable
class TFConvBertMainLayer(tf.keras.layers.Layer):
    # Core ConvBERT stack: embeddings (+ optional projection) and encoder.
    config_class = ConvBertConfig

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.embeddings = TFConvBertEmbeddings(config, name="embeddings")

        # Only project embeddings when embedding_size differs from hidden_size.
        if config.embedding_size != config.hidden_size:
            self.embeddings_project = tf.keras.layers.Dense(config.hidden_size, name="embeddings_project")

        self.encoder = TFConvBertEncoder(config, name="encoder")
        self.config = config

    def get_input_embeddings(self):
        return self.embeddings

    def set_input_embeddings(self, value):
        self.embeddings.weight = value
        self.embeddings.vocab_size = value.shape[0]

    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        raise NotImplementedError

    def get_extended_attention_mask(self, attention_mask, input_shape, dtype):
        if attention_mask is None:
            attention_mask = tf.fill(input_shape, 1)

        # We create a 3D attention mask from a 2D tensor mask.
        # Sizes are [batch_size, 1, 1, to_seq_length]
        # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
        # this attention mask is more simple than the triangular masking of causal attention
        # used in OpenAI GPT, we just need to prepare the broadcast dimension here.
        extended_attention_mask = tf.reshape(attention_mask, (input_shape[0], 1, 1, input_shape[1]))

        # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
        # masked positions, this operation will create a tensor which is 0.0 for
        # positions we want to attend and -10000.0 for masked positions.
        # Since we are adding it to the raw scores before the softmax, this is
        # effectively the same as removing these entirely.
        extended_attention_mask = tf.cast(extended_attention_mask, dtype)
        extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0

        return extended_attention_mask

    def get_head_mask(self, head_mask):
        # Head masking is not implemented; expand to one None per layer.
        if head_mask is not None:
            raise NotImplementedError
        else:
            head_mask = [None] * self.config.num_hidden_layers

        return head_mask

    @unpack_inputs
    def call(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
    ):
        # Exactly one of input_ids / inputs_embeds must be provided.
        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = shape_list(input_ids)
        elif inputs_embeds is not None:
            input_shape = shape_list(inputs_embeds)[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")

        if attention_mask is None:
            attention_mask = tf.fill(input_shape, 1)

        if token_type_ids is None:
            token_type_ids = tf.fill(input_shape, 0)

        hidden_states = self.embeddings(input_ids, position_ids, token_type_ids, inputs_embeds, training=training)
        extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, hidden_states.dtype)
        head_mask = self.get_head_mask(head_mask)

        if hasattr(self, "embeddings_project"):
            hidden_states = self.embeddings_project(hidden_states, training=training)

        hidden_states = self.encoder(
            hidden_states,
            extended_attention_mask,
            head_mask,
            output_attentions,
            output_hidden_states,
            return_dict,
            training=training,
        )

        return hidden_states


class TFConvBertPreTrainedModel(TFPreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = ConvBertConfig
    base_model_prefix = "convbert"


CONVBERT_START_DOCSTRING = r"""
    This model inherits from [`TFPreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads
    etc.)

    This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. Use it
    as a regular TF 2.0 Keras Model and refer to the TF 2.0 documentation for all matter related to general usage and
    behavior.

    <Tip>

    TensorFlow models and layers in `transformers` accept two formats as input:

    - having all inputs as keyword arguments (like PyTorch models), or
    - having all inputs as a list, tuple or dict in the first positional argument.

    The reason the second format is supported is that Keras methods prefer this format when passing inputs to models
    and layers. Because of this support, when using methods like `model.fit()` things should "just work" for you - just
    pass your inputs and labels in any format that `model.fit()` supports! If, however, you want to use the second
    format outside of Keras methods like `fit()` and `predict()`, such as when creating your own layers or models with
    the Keras `Functional` API, there are three possibilities you can use to gather all the input Tensors in the first
    positional argument:

    - a single Tensor with `input_ids` only and nothing else: `model(input_ids)`
    - a list of varying length with one or several input Tensors IN THE ORDER given in the docstring:
    `model([input_ids, attention_mask])` or `model([input_ids, attention_mask, token_type_ids])`
    - a dictionary with one or several input Tensors associated to the input names given in the docstring:
    `model({"input_ids": input_ids, "token_type_ids": token_type_ids})`

    Note that when creating models and layers with
    [subclassing](https://keras.io/guides/making_new_layers_and_models_via_subclassing/) then you don't need to worry
    about any of this, as you can just pass inputs like you would to any other Python function!

    </Tip>

    Args:
        config ([`ConvBertConfig`]): Model configuration class with all the parameters of the model. Initializing with
            a config file does not load the weights associated with the model, only the configuration. Check out the
            [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""

CONVBERT_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (`Numpy array` or `tf.Tensor` of shape `({0})`):
            Indices of input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.__call__`] and
            [`PreTrainedTokenizer.encode`] for details.

            [What are input IDs?](../glossary#input-ids)
        attention_mask (`Numpy array` or `tf.Tensor` of shape `({0})`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        token_type_ids (`Numpy array` or `tf.Tensor` of shape `({0})`, *optional*):
            Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
            1]`:

            - 0 corresponds to a *sentence A* token,
            - 1 corresponds to a *sentence B* token.

            [What are token type IDs?](../glossary#token-type-ids)
        position_ids (`Numpy array` or `tf.Tensor` of shape `({0})`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
            config.max_position_embeddings - 1]`.

            [What are position IDs?](../glossary#position-ids)
        head_mask (`Numpy array` or `tf.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.

        inputs_embeds (`tf.Tensor` of shape `({0}, hidden_size)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
            model's internal embedding lookup matrix.
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the
            config will be used instead.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail. This argument can be used only in eager mode, in graph mode the value in the config will be
            used instead.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. This argument can be used in
            eager mode, in graph mode the value will always be set to True.
        training (`bool`, *optional*, defaults to `False`):
            Whether or not to use the model in training mode (some modules like dropout modules have different
            behaviors between training and evaluation).
"""


@add_start_docstrings(
    "The bare ConvBERT Model transformer outputting raw hidden-states without any specific head on top.",
    CONVBERT_START_DOCSTRING,
)
class TFConvBertModel(TFConvBertPreTrainedModel):
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)

        self.convbert = TFConvBertMainLayer(config, name="convbert")

    @unpack_inputs
    @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=TFBaseModelOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def call(
        self,
        input_ids: Optional[TFModelInputType] = None,
        attention_mask: Optional[Union[np.array, tf.Tensor]] = None,
        token_type_ids: Optional[Union[np.array, tf.Tensor]] = None,
        position_ids: Optional[Union[np.array, tf.Tensor]] = None,
        head_mask: Optional[Union[np.array, tf.Tensor]] = None,
        inputs_embeds: Optional[tf.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        training: bool = False,
    ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]:
        # Thin wrapper: delegate everything to the main layer.
        outputs = self.convbert(
            input_ids=input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
        )

        return outputs

    def serving_output(self, output):
        # Tuples -> tensors only when the config requests those outputs.
        hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None
        attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None

        return TFBaseModelOutput(last_hidden_state=output.last_hidden_state, hidden_states=hs, attentions=attns)


class TFConvBertMaskedLMHead(tf.keras.layers.Layer):
    # Masked-LM head whose output projection is tied to the input embeddings.
    def __init__(self, config, input_embeddings, **kwargs):
        super().__init__(**kwargs)

        self.config = config
        self.embedding_size = config.embedding_size
        self.input_embeddings = input_embeddings

    def build(self, input_shape):
        self.bias = self.add_weight(shape=(self.config.vocab_size,), initializer="zeros", trainable=True, name="bias")

        super().build(input_shape)

    def get_output_embeddings(self):
        return self.input_embeddings

    def set_output_embeddings(self, value):
        self.input_embeddings.weight = value
        self.input_embeddings.vocab_size = shape_list(value)[0]

    def get_bias(self):
        return {"bias": self.bias}

    def set_bias(self, value):
        self.bias = value["bias"]
        self.config.vocab_size = shape_list(value["bias"])[0]

    def call(self, hidden_states):
        seq_length = shape_list(tensor=hidden_states)[1]
        hidden_states = tf.reshape(tensor=hidden_states, shape=[-1, self.embedding_size])
        # Tied weights: project onto the transposed input embedding matrix.
        hidden_states = tf.matmul(a=hidden_states, b=self.input_embeddings.weight, transpose_b=True)
        hidden_states = tf.reshape(tensor=hidden_states, shape=[-1, seq_length, self.config.vocab_size])
        hidden_states = tf.nn.bias_add(value=hidden_states, bias=self.bias)

        return hidden_states


class TFConvBertGeneratorPredictions(tf.keras.layers.Layer):
    # Dense -> gelu -> LayerNorm head applied before the masked-LM projection.
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
        self.dense = tf.keras.layers.Dense(config.embedding_size, name="dense")

    def call(self, generator_hidden_states, training=False):
        hidden_states = self.dense(generator_hidden_states)
        hidden_states = get_tf_activation("gelu")(hidden_states)
        hidden_states = self.LayerNorm(hidden_states)

        return hidden_states


@add_start_docstrings("""ConvBERT Model with a `language modeling` head on top.""", CONVBERT_START_DOCSTRING)
class TFConvBertForMaskedLM(TFConvBertPreTrainedModel, TFMaskedLanguageModelingLoss):
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(config, **kwargs)

        self.config = config
        self.convbert = TFConvBertMainLayer(config, name="convbert")
        self.generator_predictions = TFConvBertGeneratorPredictions(config, name="generator_predictions")

        if isinstance(config.hidden_act, str):
            self.activation = get_tf_activation(config.hidden_act)
        else:
            self.activation = config.hidden_act

        # LM head shares weights with the input embedding layer.
        self.generator_lm_head = TFConvBertMaskedLMHead(config, self.convbert.embeddings, name="generator_lm_head")

    def get_lm_head(self):
        return self.generator_lm_head

    def get_prefix_bias_name(self):
        return self.name + "/" + self.generator_lm_head.name

    @unpack_inputs
    @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=TFMaskedLMOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def call(
        self,
        input_ids: Optional[TFModelInputType] = None,
        attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
        token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
        position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
        head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
        inputs_embeds: Optional[tf.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        labels: Optional[tf.Tensor] = None,
        training: Optional[bool] = False,
    ) -> Union[Tuple, TFMaskedLMOutput]:
        r"""
        labels (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should be in `[-100, 0, ...,
            config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the
            loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`
        """
        generator_hidden_states = self.convbert(
            input_ids=input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
        )
        generator_sequence_output = generator_hidden_states[0]
        prediction_scores = self.generator_predictions(generator_sequence_output, training=training)
        prediction_scores = self.generator_lm_head(prediction_scores, training=training)
        loss = None if labels is None else self.hf_compute_loss(labels, prediction_scores)

        if not return_dict:
            output = (prediction_scores,) + generator_hidden_states[1:]

            return ((loss,) + output) if loss is not None else output

        return TFMaskedLMOutput(
            loss=loss,
            logits=prediction_scores,
            hidden_states=generator_hidden_states.hidden_states,
            attentions=generator_hidden_states.attentions,
        )

    # Copied from transformers.models.bert.modeling_tf_bert.TFBertForMaskedLM.serving_output
    def serving_output(self, output):
        hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None
        attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None

        return TFMaskedLMOutput(logits=output.logits, hidden_states=hs, attentions=attns)


class TFConvBertClassificationHead(tf.keras.layers.Layer):
    """Head for sentence-level classification tasks."""

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.dense = tf.keras.layers.Dense(
            config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), name="dense"
        )
        # Fall back to hidden_dropout_prob when no classifier dropout is configured.
        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.dropout = tf.keras.layers.Dropout(classifier_dropout)
        self.out_proj = tf.keras.layers.Dense(
            config.num_labels, kernel_initializer=get_initializer(config.initializer_range), name="out_proj"
        )

        self.config = config

    def call(self, hidden_states, **kwargs):
        x = hidden_states[:, 0, :]  # take <s> token (equiv. to [CLS])
        x = self.dropout(x)
        x = self.dense(x)
        x = get_tf_activation(self.config.hidden_act)(x)
        x = self.dropout(x)
        x = self.out_proj(x)

        return x


@add_start_docstrings(
    """
    ConvBERT Model transformer with a sequence classification/regression head on top e.g., for GLUE tasks.
    """,
    CONVBERT_START_DOCSTRING,
)
class TFConvBertForSequenceClassification(TFConvBertPreTrainedModel, TFSequenceClassificationLoss):
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)

        self.num_labels = config.num_labels
        self.convbert = TFConvBertMainLayer(config, name="convbert")
        self.classifier = TFConvBertClassificationHead(config, name="classifier")

    @unpack_inputs
    @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=TFSequenceClassifierOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def call(
        self,
        input_ids: Optional[TFModelInputType] = None,
        attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
        token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
        position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
        head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
        inputs_embeds: Optional[tf.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        labels: Optional[tf.Tensor] = None,
        training: Optional[bool] = False,
    ) -> Union[Tuple, TFSequenceClassifierOutput]:
        r"""
        labels (`tf.Tensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        outputs = self.convbert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
        )
        logits = self.classifier(outputs[0], training=training)
        loss = None if labels is None else self.hf_compute_loss(labels, logits)

        if not return_dict:
            output = (logits,) + outputs[1:]

            return ((loss,) + output) if loss is not None else output

        return TFSequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )

    def serving_output(self, output):
        hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None
        attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None

        return TFSequenceClassifierOutput(logits=output.logits, hidden_states=hs, attentions=attns)


@add_start_docstrings(
    """
    ConvBERT Model with a multiple choice classification head on top (a linear layer on top of the pooled output and a
    softmax) e.g. for RocStories/SWAG tasks.
    """,
    CONVBERT_START_DOCSTRING,
)
class TFConvBertForMultipleChoice(TFConvBertPreTrainedModel, TFMultipleChoiceLoss):
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)

        self.convbert = TFConvBertMainLayer(config, name="convbert")
        self.sequence_summary = TFSequenceSummary(
            config, initializer_range=config.initializer_range, name="sequence_summary"
        )
        self.classifier = tf.keras.layers.Dense(
            1, kernel_initializer=get_initializer(config.initializer_range), name="classifier"
        )

    @property
    def dummy_inputs(self):
        """
        Dummy inputs to build the network.

        Returns:
            tf.Tensor with dummy inputs
        """
        return {"input_ids": tf.convert_to_tensor(MULTIPLE_CHOICE_DUMMY_INPUTS, dtype=tf.int32)}

    @unpack_inputs
    @add_start_docstrings_to_model_forward(
        CONVBERT_INPUTS_DOCSTRING.format("batch_size, num_choices, sequence_length")
    )
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=TFMultipleChoiceModelOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def call(
        self,
        input_ids: Optional[TFModelInputType] = None,
        attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
        token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
        position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
        head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
        inputs_embeds: Optional[tf.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        labels: Optional[tf.Tensor] = None,
        training: Optional[bool] = False,
    ) -> Union[Tuple, TFMultipleChoiceModelOutput]:
        r"""
        labels (`tf.Tensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the multiple choice classification loss. Indices should be in `[0, ..., num_choices]`
            where `num_choices` is the size of the second dimension of the input tensors. (See `input_ids` above)
        """
        if input_ids is not None:
            num_choices = shape_list(input_ids)[1]
            seq_length = shape_list(input_ids)[2]
        else:
            num_choices = shape_list(inputs_embeds)[1]
            seq_length = shape_list(inputs_embeds)[2]

        # Flatten (batch, num_choices, seq) to (batch * num_choices, seq) so
        # the base model sees ordinary 2D batches.
        flat_input_ids = tf.reshape(input_ids, (-1, seq_length)) if input_ids is not None else None
        flat_attention_mask = tf.reshape(attention_mask, (-1, seq_length)) if attention_mask is not None else None
        flat_token_type_ids = tf.reshape(token_type_ids, (-1, seq_length)) if token_type_ids is not None else None
        flat_position_ids = tf.reshape(position_ids, (-1, seq_length)) if position_ids is not None else None
        flat_inputs_embeds = (
            tf.reshape(inputs_embeds, (-1, seq_length, shape_list(inputs_embeds)[3]))
            if inputs_embeds is not None
            else None
        )
        outputs = self.convbert(
            flat_input_ids,
            flat_attention_mask,
            flat_token_type_ids,
            flat_position_ids,
            head_mask,
            flat_inputs_embeds,
            output_attentions,
            output_hidden_states,
            return_dict=return_dict,
            training=training,
        )
        logits = self.sequence_summary(outputs[0], training=training)
        logits = self.classifier(logits)
        reshaped_logits = tf.reshape(logits, (-1, num_choices))
        loss = None if labels is None else self.hf_compute_loss(labels, reshaped_logits)

        if not return_dict:
            output = (reshaped_logits,) + outputs[1:]

            return ((loss,) + output) if loss is not None else output

        return TFMultipleChoiceModelOutput(
            loss=loss,
            logits=reshaped_logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )

    @tf.function(
        input_signature=[
            {
                "input_ids": tf.TensorSpec((None, None, None), tf.int32, name="input_ids"),
                "attention_mask": tf.TensorSpec((None, None, None), tf.int32, name="attention_mask"),
                "token_type_ids": tf.TensorSpec((None, None, None), tf.int32, name="token_type_ids"),
            }
        ]
    )
    def serving(self, inputs):
        output = self.call(inputs)

        return self.serving_output(output)

    def serving_output(self, output):
        hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None
        attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None

        return TFMultipleChoiceModelOutput(logits=output.logits, hidden_states=hs, attentions=attns)


@add_start_docstrings(
    """
    ConvBERT Model with a token classification head on top (a linear layer on top of the hidden-states output) e.g. for
    Named-Entity-Recognition (NER) tasks.
    """,
    CONVBERT_START_DOCSTRING,
)
class TFConvBertForTokenClassification(TFConvBertPreTrainedModel, TFTokenClassificationLoss):
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)

        self.num_labels = config.num_labels
        self.convbert = TFConvBertMainLayer(config, name="convbert")
        # Fall back to hidden_dropout_prob when no classifier dropout is configured.
        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.dropout = tf.keras.layers.Dropout(classifier_dropout)
        self.classifier = tf.keras.layers.Dense(
            config.num_labels, kernel_initializer=get_initializer(config.initializer_range), name="classifier"
        )

    @unpack_inputs
    @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=TFTokenClassifierOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def call(
        self,
        input_ids: Optional[TFModelInputType] = None,
        attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
        token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
        position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
        head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
        inputs_embeds: Optional[tf.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        labels: Optional[tf.Tensor] = None,
        training: Optional[bool] = False,
    ) -> Union[Tuple, TFTokenClassifierOutput]:
        r"""
        labels (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the token classification loss. Indices should be in `[0, ..., config.num_labels - 1]`.
        """
        outputs = self.convbert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
        )
        sequence_output = outputs[0]
        sequence_output = self.dropout(sequence_output, training=training)
        logits = self.classifier(sequence_output)
        loss = None if labels is None else self.hf_compute_loss(labels, logits)

        if not return_dict:
            output = (logits,) + outputs[1:]

            return ((loss,) + output) if loss is not None else output

        return TFTokenClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )

    def serving_output(self, output):
        hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None
        attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None

        return TFTokenClassifierOutput(logits=output.logits, hidden_states=hs, attentions=attns)


@add_start_docstrings(
    """
    ConvBERT Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear
    layer on top of the hidden-states output to compute `span start logits` and `span end logits`).
""", CONVBERT_START_DOCSTRING, ) class TFConvBertForQuestionAnswering(TFConvBertPreTrainedModel, TFQuestionAnsweringLoss): def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.num_labels = config.num_labels self.convbert = TFConvBertMainLayer(config, name="convbert") self.qa_outputs = tf.keras.layers.Dense( config.num_labels, kernel_initializer=get_initializer(config.initializer_range), name="qa_outputs" ) @unpack_inputs @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TFQuestionAnsweringModelOutput, config_class=_CONFIG_FOR_DOC, ) def call( self, input_ids: Optional[TFModelInputType] = None, attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, token_type_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, start_positions: Optional[tf.Tensor] = None, end_positions: Optional[tf.Tensor] = None, training: Optional[bool] = False, ) -> Union[Tuple, TFQuestionAnsweringModelOutput]: r""" start_positions (`tf.Tensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the start of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence are not taken into account for computing the loss. end_positions (`tf.Tensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the end of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (`sequence_length`). 
Position outside of the sequence are not taken into account for computing the loss. """ outputs = self.convbert( input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) sequence_output = outputs[0] logits = self.qa_outputs(sequence_output) start_logits, end_logits = tf.split(logits, 2, axis=-1) start_logits = tf.squeeze(start_logits, axis=-1) end_logits = tf.squeeze(end_logits, axis=-1) loss = None if start_positions is not None and end_positions is not None: labels = {"start_position": start_positions} labels["end_position"] = end_positions loss = self.hf_compute_loss(labels, (start_logits, end_logits)) if not return_dict: output = (start_logits, end_logits) + outputs[1:] return ((loss,) + output) if loss is not None else output return TFQuestionAnsweringModelOutput( loss=loss, start_logits=start_logits, end_logits=end_logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFQuestionAnsweringModelOutput( start_logits=output.start_logits, end_logits=output.end_logits, hidden_states=hs, attentions=attns )
27182812/ChatGLM-LLaMA-chinese-insturct
59,241
src/transformers/models/convbert/modeling_convbert.py
# coding=utf-8
# Copyright 2021 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch ConvBERT model."""


import math
import os
from operator import attrgetter
from typing import Optional, Tuple, Union

import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

from ...activations import ACT2FN, get_activation
from ...modeling_outputs import (
    BaseModelOutputWithCrossAttentions,
    MaskedLMOutput,
    MultipleChoiceModelOutput,
    QuestionAnsweringModelOutput,
    SequenceClassifierOutput,
    TokenClassifierOutput,
)
from ...modeling_utils import PreTrainedModel, SequenceSummary
from ...pytorch_utils import apply_chunking_to_forward, find_pruneable_heads_and_indices, prune_linear_layer
from ...utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
from .configuration_convbert import ConvBertConfig


logger = logging.get_logger(__name__)

_CHECKPOINT_FOR_DOC = "YituTech/conv-bert-base"
_CONFIG_FOR_DOC = "ConvBertConfig"

CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [
    "YituTech/conv-bert-base",
    "YituTech/conv-bert-medium-small",
    "YituTech/conv-bert-small",
    # See all ConvBERT models at https://huggingface.co/models?filter=convbert
]


def load_tf_weights_in_convbert(model, config, tf_checkpoint_path):
    """Load tf checkpoints in a pytorch model."""
    try:
        import tensorflow as tf
    except ImportError:
        logger.error(
            "Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
            "https://www.tensorflow.org/install/ for installation instructions."
        )
        raise
    tf_path = os.path.abspath(tf_checkpoint_path)
    logger.info(f"Converting TensorFlow checkpoint from {tf_path}")
    # Load weights from TF model
    init_vars = tf.train.list_variables(tf_path)
    tf_data = {}
    for name, shape in init_vars:
        logger.info(f"Loading TF weight {name} with shape {shape}")
        array = tf.train.load_variable(tf_path, name)
        tf_data[name] = array

    # Static mapping from PyTorch parameter names to the ELECTRA-style variable
    # names used inside the TF checkpoint.
    param_mapping = {
        "embeddings.word_embeddings.weight": "electra/embeddings/word_embeddings",
        "embeddings.position_embeddings.weight": "electra/embeddings/position_embeddings",
        "embeddings.token_type_embeddings.weight": "electra/embeddings/token_type_embeddings",
        "embeddings.LayerNorm.weight": "electra/embeddings/LayerNorm/gamma",
        "embeddings.LayerNorm.bias": "electra/embeddings/LayerNorm/beta",
        "embeddings_project.weight": "electra/embeddings_project/kernel",
        "embeddings_project.bias": "electra/embeddings_project/bias",
    }
    # Grouped feed-forward layers are stored under "g_dense" in the checkpoint
    # when num_groups > 1; otherwise under plain "dense".
    if config.num_groups > 1:
        group_dense_name = "g_dense"
    else:
        group_dense_name = "dense"

    # Extend the mapping with the per-layer attention / convolution / FFN names.
    for j in range(config.num_hidden_layers):
        param_mapping[
            f"encoder.layer.{j}.attention.self.query.weight"
        ] = f"electra/encoder/layer_{j}/attention/self/query/kernel"
        param_mapping[
            f"encoder.layer.{j}.attention.self.query.bias"
        ] = f"electra/encoder/layer_{j}/attention/self/query/bias"
        param_mapping[
            f"encoder.layer.{j}.attention.self.key.weight"
        ] = f"electra/encoder/layer_{j}/attention/self/key/kernel"
        param_mapping[
            f"encoder.layer.{j}.attention.self.key.bias"
        ] = f"electra/encoder/layer_{j}/attention/self/key/bias"
        param_mapping[
            f"encoder.layer.{j}.attention.self.value.weight"
        ] = f"electra/encoder/layer_{j}/attention/self/value/kernel"
        param_mapping[
            f"encoder.layer.{j}.attention.self.value.bias"
        ] = f"electra/encoder/layer_{j}/attention/self/value/bias"
        param_mapping[
            f"encoder.layer.{j}.attention.self.key_conv_attn_layer.depthwise.weight"
        ] = f"electra/encoder/layer_{j}/attention/self/conv_attn_key/depthwise_kernel"
        param_mapping[
            f"encoder.layer.{j}.attention.self.key_conv_attn_layer.pointwise.weight"
        ] = f"electra/encoder/layer_{j}/attention/self/conv_attn_key/pointwise_kernel"
        param_mapping[
            f"encoder.layer.{j}.attention.self.key_conv_attn_layer.bias"
        ] = f"electra/encoder/layer_{j}/attention/self/conv_attn_key/bias"
        param_mapping[
            f"encoder.layer.{j}.attention.self.conv_kernel_layer.weight"
        ] = f"electra/encoder/layer_{j}/attention/self/conv_attn_kernel/kernel"
        param_mapping[
            f"encoder.layer.{j}.attention.self.conv_kernel_layer.bias"
        ] = f"electra/encoder/layer_{j}/attention/self/conv_attn_kernel/bias"
        param_mapping[
            f"encoder.layer.{j}.attention.self.conv_out_layer.weight"
        ] = f"electra/encoder/layer_{j}/attention/self/conv_attn_point/kernel"
        param_mapping[
            f"encoder.layer.{j}.attention.self.conv_out_layer.bias"
        ] = f"electra/encoder/layer_{j}/attention/self/conv_attn_point/bias"
        param_mapping[
            f"encoder.layer.{j}.attention.output.dense.weight"
        ] = f"electra/encoder/layer_{j}/attention/output/dense/kernel"
        param_mapping[
            f"encoder.layer.{j}.attention.output.LayerNorm.weight"
        ] = f"electra/encoder/layer_{j}/attention/output/LayerNorm/gamma"
        param_mapping[
            f"encoder.layer.{j}.attention.output.dense.bias"
        ] = f"electra/encoder/layer_{j}/attention/output/dense/bias"
        param_mapping[
            f"encoder.layer.{j}.attention.output.LayerNorm.bias"
        ] = f"electra/encoder/layer_{j}/attention/output/LayerNorm/beta"
        param_mapping[
            f"encoder.layer.{j}.intermediate.dense.weight"
        ] = f"electra/encoder/layer_{j}/intermediate/{group_dense_name}/kernel"
        param_mapping[
            f"encoder.layer.{j}.intermediate.dense.bias"
        ] = f"electra/encoder/layer_{j}/intermediate/{group_dense_name}/bias"
        param_mapping[
            f"encoder.layer.{j}.output.dense.weight"
        ] = f"electra/encoder/layer_{j}/output/{group_dense_name}/kernel"
        param_mapping[
            f"encoder.layer.{j}.output.dense.bias"
        ] = f"electra/encoder/layer_{j}/output/{group_dense_name}/bias"
        param_mapping[
            f"encoder.layer.{j}.output.LayerNorm.weight"
        ] = f"electra/encoder/layer_{j}/output/LayerNorm/gamma"
        param_mapping[f"encoder.layer.{j}.output.LayerNorm.bias"] = f"electra/encoder/layer_{j}/output/LayerNorm/beta"

    # Copy every mapped TF array into the corresponding PyTorch parameter,
    # fixing up the layout differences between the two frameworks.
    for param in model.named_parameters():
        param_name = param[0]
        retriever = attrgetter(param_name)
        result = retriever(model)
        tf_name = param_mapping[param_name]
        value = torch.from_numpy(tf_data[tf_name])
        logger.info(f"TF: {tf_name}, PT: {param_name} ")
        if tf_name.endswith("/kernel"):
            # TF dense kernels are (in, out); PyTorch nn.Linear weights are
            # (out, in) — transpose, except for the grouped dense layers which
            # already use the (groups, in, out) layout expected by PyTorch.
            if not tf_name.endswith("/intermediate/g_dense/kernel"):
                if not tf_name.endswith("/output/g_dense/kernel"):
                    value = value.T
        if tf_name.endswith("/depthwise_kernel"):
            value = value.permute(1, 2, 0)  # 2, 0, 1
        if tf_name.endswith("/pointwise_kernel"):
            value = value.permute(2, 1, 0)  # 2, 1, 0
        if tf_name.endswith("/conv_attn_key/bias"):
            # SeparableConv1D stores its bias as (out_filters, 1).
            value = value.unsqueeze(-1)
        result.data = value
    return model


class ConvBertEmbeddings(nn.Module):
    """Construct the embeddings from word, position and token_type embeddings."""

    def __init__(self, config):
        super().__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.embedding_size, padding_idx=config.pad_token_id)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.embedding_size)
        self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.embedding_size)

        # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
        # any TensorFlow checkpoint file
        self.LayerNorm = nn.LayerNorm(config.embedding_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # position_ids (1, len position emb) is contiguous in memory and exported when serialized
        self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)))
        self.register_buffer(
            "token_type_ids", torch.zeros(self.position_ids.size(), dtype=torch.long), persistent=False
        )

    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
    ) -> torch.LongTensor:
        # Shape comes from input_ids when given, otherwise from inputs_embeds
        # (whose trailing dim is the embedding size and is dropped).
        if input_ids is not None:
            input_shape = input_ids.size()
        else:
            input_shape = inputs_embeds.size()[:-1]

        seq_length = input_shape[1]

        if position_ids is None:
            position_ids = self.position_ids[:, :seq_length]

        # Setting the token_type_ids to the registered buffer in constructor where it is all zeros, which usually occurs
        # when its auto-generated, registered buffer helps users when tracing the model without passing token_type_ids, solves
        # issue #5664
        if token_type_ids is None:
            if hasattr(self, "token_type_ids"):
                buffered_token_type_ids = self.token_type_ids[:, :seq_length]
                buffered_token_type_ids_expanded = buffered_token_type_ids.expand(input_shape[0], seq_length)
                token_type_ids = buffered_token_type_ids_expanded
            else:
                token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=self.position_ids.device)

        if inputs_embeds is None:
            inputs_embeds = self.word_embeddings(input_ids)

        position_embeddings = self.position_embeddings(position_ids)
        token_type_embeddings = self.token_type_embeddings(token_type_ids)

        # Sum the three embeddings, then normalize and regularize.
        embeddings = inputs_embeds + position_embeddings + token_type_embeddings
        embeddings = self.LayerNorm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings


class ConvBertPreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = ConvBertConfig
    load_tf_weights = load_tf_weights_in_convbert
    base_model_prefix = "convbert"
    supports_gradient_checkpointing = True
    _keys_to_ignore_on_load_missing = [r"position_ids"]
    _keys_to_ignore_on_load_unexpected = [r"convbert.embeddings_project.weight", r"convbert.embeddings_project.bias"]

    def _init_weights(self, module):
        """Initialize the weights"""
        if isinstance(module, nn.Linear):
            # Slightly different from the TF version which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.padding_idx is not None:
                # Keep the padding token's embedding at zero.
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)

    def _set_gradient_checkpointing(self, module, value=False):
        # Gradient checkpointing is toggled on the encoder only.
        if isinstance(module, ConvBertEncoder):
            module.gradient_checkpointing = value


class SeparableConv1D(nn.Module):
    """This class implements separable convolution, i.e. a depthwise and a pointwise layer"""

    def __init__(self, config, input_filters, output_filters, kernel_size, **kwargs):
        super().__init__()
        # Depthwise: one conv filter per input channel (groups == channels).
        self.depthwise = nn.Conv1d(
            input_filters,
            input_filters,
            kernel_size=kernel_size,
            groups=input_filters,
            padding=kernel_size // 2,
            bias=False,
        )
        # Pointwise: 1x1 conv mixing channels to the output width.
        self.pointwise = nn.Conv1d(input_filters, output_filters, kernel_size=1, bias=False)
        # Bias is kept separate (shape (out, 1)) to match the TF checkpoint layout.
        self.bias = nn.Parameter(torch.zeros(output_filters, 1))

        self.depthwise.weight.data.normal_(mean=0.0, std=config.initializer_range)
        self.pointwise.weight.data.normal_(mean=0.0, std=config.initializer_range)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        x = self.depthwise(hidden_states)
        x = self.pointwise(x)
        x += self.bias
        return x


class ConvBertSelfAttention(nn.Module):
    def __init__(self, config):
        super().__init__()
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            raise ValueError(
                f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention "
                f"heads ({config.num_attention_heads})"
            )

        # head_ratio trades attention heads for convolution capacity; make sure
        # at least one attention head remains.
        new_num_attention_heads = config.num_attention_heads // config.head_ratio
        if new_num_attention_heads < 1:
            self.head_ratio = config.num_attention_heads
            self.num_attention_heads = 1
        else:
            self.num_attention_heads = new_num_attention_heads
            self.head_ratio = config.head_ratio

        self.conv_kernel_size = config.conv_kernel_size
        if config.hidden_size % self.num_attention_heads != 0:
            raise ValueError("hidden_size should be divisible by num_attention_heads")

        # Halved per-head size: the output later concatenates the attention and
        # convolution branches, doubling back to the full width.
        self.attention_head_size = (config.hidden_size // self.num_attention_heads) // 2
        self.all_head_size = self.num_attention_heads * self.attention_head_size

        self.query = nn.Linear(config.hidden_size, self.all_head_size)
        self.key = nn.Linear(config.hidden_size, self.all_head_size)
        self.value = nn.Linear(config.hidden_size, self.all_head_size)

        self.key_conv_attn_layer = SeparableConv1D(
            config, config.hidden_size, self.all_head_size, self.conv_kernel_size
        )
        self.conv_kernel_layer = nn.Linear(self.all_head_size, self.num_attention_heads * self.conv_kernel_size)
        self.conv_out_layer = nn.Linear(config.hidden_size, self.all_head_size)

        self.unfold = nn.Unfold(
            kernel_size=[self.conv_kernel_size, 1], padding=[int((self.conv_kernel_size - 1) / 2), 0]
        )

        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)

    def transpose_for_scores(self, x):
        # (batch, seq, all_head) -> (batch, heads, seq, head_size)
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(*new_x_shape)
        return x.permute(0, 2, 1, 3)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor, Optional[torch.Tensor]]:
        mixed_query_layer = self.query(hidden_states)
        batch_size = hidden_states.size(0)
        # If this is instantiated as a cross-attention module, the keys
        # and values come from an encoder; the attention mask needs to be
        # such that the encoder's padding tokens are not attended to.
        if encoder_hidden_states is not None:
            mixed_key_layer = self.key(encoder_hidden_states)
            mixed_value_layer = self.value(encoder_hidden_states)
        else:
            mixed_key_layer = self.key(hidden_states)
            mixed_value_layer = self.value(hidden_states)

        # Conv branch operates channel-first, hence the transpose round-trip.
        mixed_key_conv_attn_layer = self.key_conv_attn_layer(hidden_states.transpose(1, 2))
        mixed_key_conv_attn_layer = mixed_key_conv_attn_layer.transpose(1, 2)

        query_layer = self.transpose_for_scores(mixed_query_layer)
        key_layer = self.transpose_for_scores(mixed_key_layer)
        value_layer = self.transpose_for_scores(mixed_value_layer)

        # Dynamic (input-dependent) convolution kernels, normalized over the
        # kernel-size axis with softmax.
        conv_attn_layer = torch.multiply(mixed_key_conv_attn_layer, mixed_query_layer)

        conv_kernel_layer = self.conv_kernel_layer(conv_attn_layer)
        conv_kernel_layer = torch.reshape(conv_kernel_layer, [-1, self.conv_kernel_size, 1])
        conv_kernel_layer = torch.softmax(conv_kernel_layer, dim=1)

        # Unfold gathers each position's conv_kernel_size neighborhood so the
        # dynamic kernels can be applied with a single batched matmul.
        conv_out_layer = self.conv_out_layer(hidden_states)
        conv_out_layer = torch.reshape(conv_out_layer, [batch_size, -1, self.all_head_size])
        conv_out_layer = conv_out_layer.transpose(1, 2).contiguous().unsqueeze(-1)
        conv_out_layer = nn.functional.unfold(
            conv_out_layer,
            kernel_size=[self.conv_kernel_size, 1],
            dilation=1,
            padding=[(self.conv_kernel_size - 1) // 2, 0],
            stride=1,
        )
        conv_out_layer = conv_out_layer.transpose(1, 2).reshape(
            batch_size, -1, self.all_head_size, self.conv_kernel_size
        )
        conv_out_layer = torch.reshape(conv_out_layer, [-1, self.attention_head_size, self.conv_kernel_size])
        conv_out_layer = torch.matmul(conv_out_layer, conv_kernel_layer)
        conv_out_layer = torch.reshape(conv_out_layer, [-1, self.all_head_size])

        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
        attention_scores = attention_scores / math.sqrt(self.attention_head_size)
        if attention_mask is not None:
            # Apply the attention mask is (precomputed for all layers in ConvBertModel forward() function)
            attention_scores = attention_scores + attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = nn.functional.softmax(attention_scores, dim=-1)

        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs)

        # Mask heads if we want to
        if head_mask is not None:
            attention_probs = attention_probs * head_mask

        context_layer = torch.matmul(attention_probs, value_layer)
        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()

        # Concatenate the attention and convolution branches along the head axis;
        # the flattened output width is therefore 2 * all_head_size.
        conv_out = torch.reshape(conv_out_layer, [batch_size, -1, self.num_attention_heads, self.attention_head_size])
        context_layer = torch.cat([context_layer, conv_out], 2)

        # conv and context
        new_context_layer_shape = context_layer.size()[:-2] + (
            self.num_attention_heads * self.attention_head_size * 2,
        )
        context_layer = context_layer.view(*new_context_layer_shape)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)
        return outputs


class ConvBertSelfOutput(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        # Project, dropout, then residual-add and normalize.
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)
        return hidden_states


class ConvBertAttention(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.self = ConvBertSelfAttention(config)
        self.output = ConvBertSelfOutput(config)
        self.pruned_heads = set()

    def prune_heads(self, heads):
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads
        )

        # Prune linear layers
        self.self.query = prune_linear_layer(self.self.query, index)
        self.self.key = prune_linear_layer(self.self.key, index)
        self.self.value = prune_linear_layer(self.self.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)

        # Update hyper params and store pruned heads
        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
        self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor, Optional[torch.FloatTensor]]:
        self_outputs = self.self(
            hidden_states,
            attention_mask,
            head_mask,
            encoder_hidden_states,
            output_attentions,
        )
        attention_output = self.output(self_outputs[0], hidden_states)
        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs


class GroupedLinearLayer(nn.Module):
    def __init__(self, input_size, output_size, num_groups):
        super().__init__()
        self.input_size = input_size
        self.output_size = output_size
        self.num_groups = num_groups
        # Each group gets its own (in/groups, out/groups) weight slice.
        self.group_in_dim = self.input_size // self.num_groups
        self.group_out_dim = self.output_size // self.num_groups
        self.weight = nn.Parameter(torch.empty(self.num_groups, self.group_in_dim, self.group_out_dim))
        self.bias = nn.Parameter(torch.empty(output_size))

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        batch_size = list(hidden_states.size())[0]
        # Route (..., groups, in/groups) through a batched matmul with the
        # group axis leading, then restore (batch, seq, output_size).
        x = torch.reshape(hidden_states, [-1, self.num_groups, self.group_in_dim])
        x = x.permute(1, 0, 2)
        x = torch.matmul(x, self.weight)
        x = x.permute(1, 0, 2)
        x = torch.reshape(x, [batch_size, -1, self.output_size])
        x = x + self.bias
        return x


class ConvBertIntermediate(nn.Module):
    def __init__(self, config):
        super().__init__()
        if config.num_groups == 1:
            self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        else:
            self.dense = GroupedLinearLayer(
                input_size=config.hidden_size, output_size=config.intermediate_size, num_groups=config.num_groups
            )
        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = ACT2FN[config.hidden_act]
        else:
            self.intermediate_act_fn = config.hidden_act

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.intermediate_act_fn(hidden_states)
        return hidden_states


class ConvBertOutput(nn.Module):
    def __init__(self, config):
        super().__init__()
        if config.num_groups == 1:
            self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        else:
            self.dense = GroupedLinearLayer(
                input_size=config.intermediate_size, output_size=config.hidden_size, num_groups=config.num_groups
            )
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)
        return hidden_states


class ConvBertLayer(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.chunk_size_feed_forward = config.chunk_size_feed_forward
        self.seq_len_dim = 1
        self.attention = ConvBertAttention(config)
        self.is_decoder = config.is_decoder
        self.add_cross_attention = config.add_cross_attention
        if self.add_cross_attention:
            if not self.is_decoder:
                raise TypeError(f"{self} should be used as a decoder model if cross attention is added")
            self.crossattention = ConvBertAttention(config)
        self.intermediate = ConvBertIntermediate(config)
        self.output = ConvBertOutput(config)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        encoder_attention_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor, Optional[torch.FloatTensor]]:
        self_attention_outputs = self.attention(
            hidden_states,
            attention_mask,
            head_mask,
            output_attentions=output_attentions,
        )
        attention_output = self_attention_outputs[0]
        outputs = self_attention_outputs[1:]  # add self attentions if we output attention weights

        if self.is_decoder and encoder_hidden_states is not None:
            if not hasattr(self, "crossattention"):
                raise AttributeError(
                    f"If `encoder_hidden_states` are passed, {self} has to be instantiated with cross-attention layers"
                    " by setting `config.add_cross_attention=True`"
                )
            # NOTE(review): the encoder attention mask is passed in the
            # `attention_mask` slot so cross-attention masks encoder padding.
            cross_attention_outputs = self.crossattention(
                attention_output,
                encoder_attention_mask,
                head_mask,
                encoder_hidden_states,
                output_attentions,
            )
            attention_output = cross_attention_outputs[0]
            outputs = outputs + cross_attention_outputs[1:]  # add cross attentions if we output attention weights

        # Chunked feed-forward to bound peak memory on long sequences.
        layer_output = apply_chunking_to_forward(
            self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output
        )
        outputs = (layer_output,) + outputs
        return outputs

    def feed_forward_chunk(self, attention_output):
        intermediate_output = self.intermediate(attention_output)
        layer_output = self.output(intermediate_output, attention_output)
        return layer_output


class ConvBertEncoder(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.layer = nn.ModuleList([ConvBertLayer(config) for _ in range(config.num_hidden_layers)])
        self.gradient_checkpointing = False

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        encoder_attention_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = False,
        output_hidden_states: Optional[bool] = False,
        return_dict: Optional[bool] = True,
    ) -> Union[Tuple, BaseModelOutputWithCrossAttentions]:
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None
        all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_head_mask = head_mask[i] if head_mask is not None else None

            if self.gradient_checkpointing and self.training:
                # Recompute activations in the backward pass instead of storing them.
                def create_custom_forward(module):
                    def custom_forward(*inputs):
                        return module(*inputs, output_attentions)

                    return custom_forward

                layer_outputs = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(layer_module),
                    hidden_states,
                    attention_mask,
                    layer_head_mask,
                    encoder_hidden_states,
                    encoder_attention_mask,
                )
            else:
                layer_outputs = layer_module(
                    hidden_states,
                    attention_mask,
                    layer_head_mask,
                    encoder_hidden_states,
                    encoder_attention_mask,
                    output_attentions,
                )
            hidden_states = layer_outputs[0]
            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)
                if self.config.add_cross_attention:
                    all_cross_attentions = all_cross_attentions + (layer_outputs[2],)

        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        if not return_dict:
            return tuple(
                v
                for v in [hidden_states, all_hidden_states, all_self_attentions, all_cross_attentions]
                if v is not None
            )
        return BaseModelOutputWithCrossAttentions(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
            cross_attentions=all_cross_attentions,
        )


class ConvBertPredictionHeadTransform(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        if isinstance(config.hidden_act, str):
            self.transform_act_fn = ACT2FN[config.hidden_act]
        else:
            self.transform_act_fn = config.hidden_act
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.transform_act_fn(hidden_states)
        hidden_states = self.LayerNorm(hidden_states)
        return hidden_states


CONVBERT_START_DOCSTRING = r"""
    This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) sub-class. Use
    it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and
    behavior.

    Parameters:
        config ([`ConvBertConfig`]): Model configuration class with all the parameters of the model. Initializing with
            a config file does not load the weights associated with the model, only the configuration. Check out the
            [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""

CONVBERT_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (`torch.LongTensor` of shape `({0})`):
            Indices of input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        token_type_ids (`torch.LongTensor` of shape `({0})`, *optional*):
            Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
            1]`:

            - 0 corresponds to a *sentence A* token,
            - 1 corresponds to a *sentence B* token.

            [What are token type IDs?](../glossary#token-type-ids)
        position_ids (`torch.LongTensor` of shape `({0})`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
            config.max_position_embeddings - 1]`.

            [What are position IDs?](../glossary#position-ids)
        head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.

        inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_size)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert *input_ids* indices into associated vectors than the
            model's internal embedding lookup matrix.
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
""" @add_start_docstrings( "The bare ConvBERT Model transformer outputting raw hidden-states without any specific head on top.", CONVBERT_START_DOCSTRING, ) class ConvBertModel(ConvBertPreTrainedModel): _keys_to_ignore_on_load_missing = ["embeddings.position_ids"] def __init__(self, config): super().__init__(config) self.embeddings = ConvBertEmbeddings(config) if config.embedding_size != config.hidden_size: self.embeddings_project = nn.Linear(config.embedding_size, config.hidden_size) self.encoder = ConvBertEncoder(config) self.config = config # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value def _prune_heads(self, heads_to_prune): """ Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base class PreTrainedModel """ for layer, heads in heads_to_prune.items(): self.encoder.layer[layer].attention.prune_heads(heads) @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=BaseModelOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, position_ids: Optional[torch.LongTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, BaseModelOutputWithCrossAttentions]: output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else 
self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") elif input_ids is not None: input_shape = input_ids.size() elif inputs_embeds is not None: input_shape = inputs_embeds.size()[:-1] else: raise ValueError("You have to specify either input_ids or inputs_embeds") batch_size, seq_length = input_shape device = input_ids.device if input_ids is not None else inputs_embeds.device if attention_mask is None: attention_mask = torch.ones(input_shape, device=device) if token_type_ids is None: if hasattr(self.embeddings, "token_type_ids"): buffered_token_type_ids = self.embeddings.token_type_ids[:, :seq_length] buffered_token_type_ids_expanded = buffered_token_type_ids.expand(batch_size, seq_length) token_type_ids = buffered_token_type_ids_expanded else: token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device) extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) hidden_states = self.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) if hasattr(self, "embeddings_project"): hidden_states = self.embeddings_project(hidden_states) hidden_states = self.encoder( hidden_states, attention_mask=extended_attention_mask, head_mask=head_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) return hidden_states class ConvBertGeneratorPredictions(nn.Module): """Prediction module for the generator, made up of two dense layers.""" def __init__(self, config): super().__init__() self.LayerNorm = nn.LayerNorm(config.embedding_size, eps=config.layer_norm_eps) self.dense = nn.Linear(config.hidden_size, config.embedding_size) def 
forward(self, generator_hidden_states: torch.FloatTensor) -> torch.FloatTensor: hidden_states = self.dense(generator_hidden_states) hidden_states = get_activation("gelu")(hidden_states) hidden_states = self.LayerNorm(hidden_states) return hidden_states @add_start_docstrings("""ConvBERT Model with a `language modeling` head on top.""", CONVBERT_START_DOCSTRING) class ConvBertForMaskedLM(ConvBertPreTrainedModel): _keys_to_ignore_on_load_missing = ["embeddings.position_ids", "generator.lm_head.weight"] def __init__(self, config): super().__init__(config) self.convbert = ConvBertModel(config) self.generator_predictions = ConvBertGeneratorPredictions(config) self.generator_lm_head = nn.Linear(config.embedding_size, config.vocab_size) # Initialize weights and apply final processing self.post_init() def get_output_embeddings(self): return self.generator_lm_head def set_output_embeddings(self, word_embeddings): self.generator_lm_head = word_embeddings @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=MaskedLMOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, position_ids: Optional[torch.LongTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, MaskedLMOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. 
Indices should be in `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]` """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict generator_hidden_states = self.convbert( input_ids, attention_mask, token_type_ids, position_ids, head_mask, inputs_embeds, output_attentions, output_hidden_states, return_dict, ) generator_sequence_output = generator_hidden_states[0] prediction_scores = self.generator_predictions(generator_sequence_output) prediction_scores = self.generator_lm_head(prediction_scores) loss = None # Masked language modeling softmax layer if labels is not None: loss_fct = nn.CrossEntropyLoss() # -100 index = padding token loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), labels.view(-1)) if not return_dict: output = (prediction_scores,) + generator_hidden_states[1:] return ((loss,) + output) if loss is not None else output return MaskedLMOutput( loss=loss, logits=prediction_scores, hidden_states=generator_hidden_states.hidden_states, attentions=generator_hidden_states.attentions, ) class ConvBertClassificationHead(nn.Module): """Head for sentence-level classification tasks.""" def __init__(self, config): super().__init__() self.dense = nn.Linear(config.hidden_size, config.hidden_size) classifier_dropout = ( config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob ) self.dropout = nn.Dropout(classifier_dropout) self.out_proj = nn.Linear(config.hidden_size, config.num_labels) self.config = config def forward(self, hidden_states: torch.Tensor, **kwargs) -> torch.Tensor: x = hidden_states[:, 0, :] # take <s> token (equiv. 
to [CLS]) x = self.dropout(x) x = self.dense(x) x = ACT2FN[self.config.hidden_act](x) x = self.dropout(x) x = self.out_proj(x) return x @add_start_docstrings( """ ConvBERT Model transformer with a sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. for GLUE tasks. """, CONVBERT_START_DOCSTRING, ) class ConvBertForSequenceClassification(ConvBertPreTrainedModel): _keys_to_ignore_on_load_missing = ["embeddings.position_ids"] def __init__(self, config): super().__init__(config) self.num_labels = config.num_labels self.config = config self.convbert = ConvBertModel(config) self.classifier = ConvBertClassificationHead(config) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=SequenceClassifierOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, position_ids: Optional[torch.LongTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, SequenceClassifierOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.convbert( input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = outputs[0] logits = self.classifier(sequence_output) loss = None if labels is not None: if self.config.problem_type is None: if self.num_labels == 1: self.config.problem_type = "regression" elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int): self.config.problem_type = "single_label_classification" else: self.config.problem_type = "multi_label_classification" if self.config.problem_type == "regression": loss_fct = MSELoss() if self.num_labels == 1: loss = loss_fct(logits.squeeze(), labels.squeeze()) else: loss = loss_fct(logits, labels) elif self.config.problem_type == "single_label_classification": loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) elif self.config.problem_type == "multi_label_classification": loss_fct = BCEWithLogitsLoss() loss = loss_fct(logits, labels) if not return_dict: output = (logits,) + outputs[1:] return ((loss,) + output) if loss is not None else output return SequenceClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) @add_start_docstrings( """ ConvBERT Model with a multiple choice classification head on top (a linear layer on top of the pooled output and a softmax) e.g. for RocStories/SWAG tasks. 
""", CONVBERT_START_DOCSTRING, ) class ConvBertForMultipleChoice(ConvBertPreTrainedModel): _keys_to_ignore_on_load_missing = ["embeddings.position_ids"] def __init__(self, config): super().__init__(config) self.convbert = ConvBertModel(config) self.sequence_summary = SequenceSummary(config) self.classifier = nn.Linear(config.hidden_size, 1) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward( CONVBERT_INPUTS_DOCSTRING.format("batch_size, num_choices, sequence_length") ) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=MultipleChoiceModelOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, position_ids: Optional[torch.LongTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, MultipleChoiceModelOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for computing the multiple choice classification loss. Indices should be in `[0, ..., num_choices-1]` where `num_choices` is the size of the second dimension of the input tensors. 
(See `input_ids` above) """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict num_choices = input_ids.shape[1] if input_ids is not None else inputs_embeds.shape[1] input_ids = input_ids.view(-1, input_ids.size(-1)) if input_ids is not None else None attention_mask = attention_mask.view(-1, attention_mask.size(-1)) if attention_mask is not None else None token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) if token_type_ids is not None else None position_ids = position_ids.view(-1, position_ids.size(-1)) if position_ids is not None else None inputs_embeds = ( inputs_embeds.view(-1, inputs_embeds.size(-2), inputs_embeds.size(-1)) if inputs_embeds is not None else None ) outputs = self.convbert( input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = outputs[0] pooled_output = self.sequence_summary(sequence_output) logits = self.classifier(pooled_output) reshaped_logits = logits.view(-1, num_choices) loss = None if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(reshaped_logits, labels) if not return_dict: output = (reshaped_logits,) + outputs[1:] return ((loss,) + output) if loss is not None else output return MultipleChoiceModelOutput( loss=loss, logits=reshaped_logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) @add_start_docstrings( """ ConvBERT Model with a token classification head on top (a linear layer on top of the hidden-states output) e.g. for Named-Entity-Recognition (NER) tasks. 
""", CONVBERT_START_DOCSTRING, ) class ConvBertForTokenClassification(ConvBertPreTrainedModel): _keys_to_ignore_on_load_missing = ["embeddings.position_ids"] def __init__(self, config): super().__init__(config) self.num_labels = config.num_labels self.convbert = ConvBertModel(config) classifier_dropout = ( config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob ) self.dropout = nn.Dropout(classifier_dropout) self.classifier = nn.Linear(config.hidden_size, config.num_labels) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=TokenClassifierOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, position_ids: Optional[torch.LongTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, TokenClassifierOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the token classification loss. Indices should be in `[0, ..., config.num_labels - 1]`. 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.convbert( input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = outputs[0] sequence_output = self.dropout(sequence_output) logits = self.classifier(sequence_output) loss = None if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output = (logits,) + outputs[1:] return ((loss,) + output) if loss is not None else output return TokenClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) @add_start_docstrings( """ ConvBERT Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear layers on top of the hidden-states output to compute `span start logits` and `span end logits`). 
""", CONVBERT_START_DOCSTRING, ) class ConvBertForQuestionAnswering(ConvBertPreTrainedModel): _keys_to_ignore_on_load_missing = ["embeddings.position_ids"] def __init__(self, config): super().__init__(config) self.num_labels = config.num_labels self.convbert = ConvBertModel(config) self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels) # Initialize weights and apply final processing self.post_init() @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( checkpoint=_CHECKPOINT_FOR_DOC, output_type=QuestionAnsweringModelOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.FloatTensor] = None, token_type_ids: Optional[torch.LongTensor] = None, position_ids: Optional[torch.LongTensor] = None, head_mask: Optional[torch.FloatTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, start_positions: Optional[torch.LongTensor] = None, end_positions: Optional[torch.LongTensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple, QuestionAnsweringModelOutput]: r""" start_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the start of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence are not taken into account for computing the loss. end_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Labels for position (index) of the end of the labelled span for computing the token classification loss. Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence are not taken into account for computing the loss. 
""" return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.convbert( input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = outputs[0] logits = self.qa_outputs(sequence_output) start_logits, end_logits = logits.split(1, dim=-1) start_logits = start_logits.squeeze(-1).contiguous() end_logits = end_logits.squeeze(-1).contiguous() total_loss = None if start_positions is not None and end_positions is not None: # If we are on multi-GPU, split add a dimension if len(start_positions.size()) > 1: start_positions = start_positions.squeeze(-1) if len(end_positions.size()) > 1: end_positions = end_positions.squeeze(-1) # sometimes the start/end positions are outside our model inputs, we ignore these terms ignored_index = start_logits.size(1) start_positions = start_positions.clamp(0, ignored_index) end_positions = end_positions.clamp(0, ignored_index) loss_fct = CrossEntropyLoss(ignore_index=ignored_index) start_loss = loss_fct(start_logits, start_positions) end_loss = loss_fct(end_logits, end_positions) total_loss = (start_loss + end_loss) / 2 if not return_dict: output = (start_logits, end_logits) + outputs[1:] return ((total_loss,) + output) if total_loss is not None else output return QuestionAnsweringModelOutput( loss=total_loss, start_logits=start_logits, end_logits=end_logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, )
27182812/ChatGLM-LLaMA-chinese-insturct
21,308
src/transformers/models/convbert/tokenization_convbert.py
# coding=utf-8 # Copyright 2018 The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tokenization classes for ConvBERT.""" import collections import os import unicodedata from typing import List, Optional, Tuple from ...tokenization_utils import PreTrainedTokenizer, _is_control, _is_punctuation, _is_whitespace from ...utils import logging logger = logging.get_logger(__name__) VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt"} PRETRAINED_VOCAB_FILES_MAP = { "vocab_file": { "YituTech/conv-bert-base": "https://huggingface.co/YituTech/conv-bert-base/resolve/main/vocab.txt", "YituTech/conv-bert-medium-small": ( "https://huggingface.co/YituTech/conv-bert-medium-small/resolve/main/vocab.txt" ), "YituTech/conv-bert-small": "https://huggingface.co/YituTech/conv-bert-small/resolve/main/vocab.txt", } } PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { "YituTech/conv-bert-base": 512, "YituTech/conv-bert-medium-small": 512, "YituTech/conv-bert-small": 512, } PRETRAINED_INIT_CONFIGURATION = { "YituTech/conv-bert-base": {"do_lower_case": True}, "YituTech/conv-bert-medium-small": {"do_lower_case": True}, "YituTech/conv-bert-small": {"do_lower_case": True}, } # Copied from transformers.models.bert.tokenization_bert.load_vocab def load_vocab(vocab_file): """Loads a vocabulary file into a dictionary.""" vocab = collections.OrderedDict() with open(vocab_file, "r", encoding="utf-8") as reader: tokens = reader.readlines() for index, token in enumerate(tokens): token = token.rstrip("\n") 
vocab[token] = index return vocab # Copied from transformers.models.bert.tokenization_bert.whitespace_tokenize def whitespace_tokenize(text): """Runs basic whitespace cleaning and splitting on a piece of text.""" text = text.strip() if not text: return [] tokens = text.split() return tokens # Copied from transformers.models.bert.tokenization_bert.BertTokenizer with bert-base-cased->YituTech/conv-bert-base, ConvBertTokenizer->BertTokenizer, BERT->ConvBERT class ConvBertTokenizer(PreTrainedTokenizer): r""" Construct a ConvBERT tokenizer. Based on WordPiece. This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: vocab_file (`str`): File containing the vocabulary. do_lower_case (`bool`, *optional*, defaults to `True`): Whether or not to lowercase the input when tokenizing. do_basic_tokenize (`bool`, *optional*, defaults to `True`): Whether or not to do basic tokenization before WordPiece. never_split (`Iterable`, *optional*): Collection of tokens which will never be split during tokenization. Only has an effect when `do_basic_tokenize=True` unk_token (`str`, *optional*, defaults to `"[UNK]"`): The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this token instead. sep_token (`str`, *optional*, defaults to `"[SEP]"`): The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for sequence classification or for a text and a question for question answering. It is also used as the last token of a sequence built with special tokens. pad_token (`str`, *optional*, defaults to `"[PAD]"`): The token used for padding, for example when batching sequences of different lengths. 
cls_token (`str`, *optional*, defaults to `"[CLS]"`): The classifier token which is used when doing sequence classification (classification of the whole sequence instead of per-token classification). It is the first token of the sequence when built with special tokens. mask_token (`str`, *optional*, defaults to `"[MASK]"`): The token used for masking values. This is the token used when training this model with masked language modeling. This is the token which the model will try to predict. tokenize_chinese_chars (`bool`, *optional*, defaults to `True`): Whether or not to tokenize Chinese characters. This should likely be deactivated for Japanese (see this [issue](https://github.com/huggingface/transformers/issues/328)). strip_accents (`bool`, *optional*): Whether or not to strip all accents. If this option is not specified, then it will be determined by the value for `lowercase` (as in the original ConvBERT). """ vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES def __init__( self, vocab_file, do_lower_case=True, do_basic_tokenize=True, never_split=None, unk_token="[UNK]", sep_token="[SEP]", pad_token="[PAD]", cls_token="[CLS]", mask_token="[MASK]", tokenize_chinese_chars=True, strip_accents=None, **kwargs, ): super().__init__( do_lower_case=do_lower_case, do_basic_tokenize=do_basic_tokenize, never_split=never_split, unk_token=unk_token, sep_token=sep_token, pad_token=pad_token, cls_token=cls_token, mask_token=mask_token, tokenize_chinese_chars=tokenize_chinese_chars, strip_accents=strip_accents, **kwargs, ) if not os.path.isfile(vocab_file): raise ValueError( f"Can't find a vocabulary file at path '{vocab_file}'. 
To load the vocabulary from a Google pretrained" " model use `tokenizer = BertTokenizer.from_pretrained(PRETRAINED_MODEL_NAME)`" ) self.vocab = load_vocab(vocab_file) self.ids_to_tokens = collections.OrderedDict([(ids, tok) for tok, ids in self.vocab.items()]) self.do_basic_tokenize = do_basic_tokenize if do_basic_tokenize: self.basic_tokenizer = BasicTokenizer( do_lower_case=do_lower_case, never_split=never_split, tokenize_chinese_chars=tokenize_chinese_chars, strip_accents=strip_accents, ) self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab, unk_token=self.unk_token) @property def do_lower_case(self): return self.basic_tokenizer.do_lower_case @property def vocab_size(self): return len(self.vocab) def get_vocab(self): return dict(self.vocab, **self.added_tokens_encoder) def _tokenize(self, text): split_tokens = [] if self.do_basic_tokenize: for token in self.basic_tokenizer.tokenize(text, never_split=self.all_special_tokens): # If the token is part of the never_split set if token in self.basic_tokenizer.never_split: split_tokens.append(token) else: split_tokens += self.wordpiece_tokenizer.tokenize(token) else: split_tokens = self.wordpiece_tokenizer.tokenize(text) return split_tokens def _convert_token_to_id(self, token): """Converts a token (str) in an id using the vocab.""" return self.vocab.get(token, self.vocab.get(self.unk_token)) def _convert_id_to_token(self, index): """Converts an index (integer) in a token (str) using the vocab.""" return self.ids_to_tokens.get(index, self.unk_token) def convert_tokens_to_string(self, tokens): """Converts a sequence of tokens (string) in a single string.""" out_string = " ".join(tokens).replace(" ##", "").strip() return out_string def build_inputs_with_special_tokens( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. 
A ConvBERT sequence has the following format: - single sequence: `[CLS] X [SEP]` - pair of sequences: `[CLS] A [SEP] B [SEP]` Args: token_ids_0 (`List[int]`): List of IDs to which the special tokens will be added. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. """ if token_ids_1 is None: return [self.cls_token_id] + token_ids_0 + [self.sep_token_id] cls = [self.cls_token_id] sep = [self.sep_token_id] return cls + token_ids_0 + sep + token_ids_1 + sep def get_special_tokens_mask( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False ) -> List[int]: """ Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer `prepare_for_model` method. Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. already_has_special_tokens (`bool`, *optional*, defaults to `False`): Whether or not the token list is already formatted with special tokens for the model. Returns: `List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token. """ if already_has_special_tokens: return super().get_special_tokens_mask( token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True ) if token_ids_1 is not None: return [1] + ([0] * len(token_ids_0)) + [1] + ([0] * len(token_ids_1)) + [1] return [1] + ([0] * len(token_ids_0)) + [1] def create_token_type_ids_from_sequences( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None ) -> List[int]: """ Create a mask from the two sequences passed to be used in a sequence-pair classification task. 
A ConvBERT sequence pair mask has the following format: ``` 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 | first sequence | second sequence | ``` If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s). Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Returns: `List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s). """ sep = [self.sep_token_id] cls = [self.cls_token_id] if token_ids_1 is None: return len(cls + token_ids_0 + sep) * [0] return len(cls + token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1] def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: index = 0 if os.path.isdir(save_directory): vocab_file = os.path.join( save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] ) else: vocab_file = (filename_prefix + "-" if filename_prefix else "") + save_directory with open(vocab_file, "w", encoding="utf-8") as writer: for token, token_index in sorted(self.vocab.items(), key=lambda kv: kv[1]): if index != token_index: logger.warning( f"Saving vocabulary to {vocab_file}: vocabulary indices are not consecutive." " Please check that the vocabulary is not corrupted!" ) index = token_index writer.write(token + "\n") index += 1 return (vocab_file,) # Copied from transformers.models.bert.tokenization_bert.BasicTokenizer class BasicTokenizer(object): """ Constructs a BasicTokenizer that will run basic tokenization (punctuation splitting, lower casing, etc.). Args: do_lower_case (`bool`, *optional*, defaults to `True`): Whether or not to lowercase the input when tokenizing. never_split (`Iterable`, *optional*): Collection of tokens which will never be split during tokenization. 
Only has an effect when `do_basic_tokenize=True` tokenize_chinese_chars (`bool`, *optional*, defaults to `True`): Whether or not to tokenize Chinese characters. This should likely be deactivated for Japanese (see this [issue](https://github.com/huggingface/transformers/issues/328)). strip_accents (`bool`, *optional*): Whether or not to strip all accents. If this option is not specified, then it will be determined by the value for `lowercase` (as in the original BERT). """ def __init__(self, do_lower_case=True, never_split=None, tokenize_chinese_chars=True, strip_accents=None): if never_split is None: never_split = [] self.do_lower_case = do_lower_case self.never_split = set(never_split) self.tokenize_chinese_chars = tokenize_chinese_chars self.strip_accents = strip_accents def tokenize(self, text, never_split=None): """ Basic Tokenization of a piece of text. Split on "white spaces" only, for sub-word tokenization, see WordPieceTokenizer. Args: never_split (`List[str]`, *optional*) Kept for backward compatibility purposes. Now implemented directly at the base class level (see [`PreTrainedTokenizer.tokenize`]) List of token not to split. """ # union() returns a new set by concatenating the two sets. never_split = self.never_split.union(set(never_split)) if never_split else self.never_split text = self._clean_text(text) # This was added on November 1st, 2018 for the multilingual and Chinese # models. This is also applied to the English models now, but it doesn't # matter since the English models were not trained on any Chinese data # and generally don't have any Chinese data in them (there are Chinese # characters in the vocabulary because Wikipedia does have some Chinese # words in the English Wikipedia.). 
if self.tokenize_chinese_chars: text = self._tokenize_chinese_chars(text) orig_tokens = whitespace_tokenize(text) split_tokens = [] for token in orig_tokens: if token not in never_split: if self.do_lower_case: token = token.lower() if self.strip_accents is not False: token = self._run_strip_accents(token) elif self.strip_accents: token = self._run_strip_accents(token) split_tokens.extend(self._run_split_on_punc(token, never_split)) output_tokens = whitespace_tokenize(" ".join(split_tokens)) return output_tokens def _run_strip_accents(self, text): """Strips accents from a piece of text.""" text = unicodedata.normalize("NFD", text) output = [] for char in text: cat = unicodedata.category(char) if cat == "Mn": continue output.append(char) return "".join(output) def _run_split_on_punc(self, text, never_split=None): """Splits punctuation on a piece of text.""" if never_split is not None and text in never_split: return [text] chars = list(text) i = 0 start_new_word = True output = [] while i < len(chars): char = chars[i] if _is_punctuation(char): output.append([char]) start_new_word = True else: if start_new_word: output.append([]) start_new_word = False output[-1].append(char) i += 1 return ["".join(x) for x in output] def _tokenize_chinese_chars(self, text): """Adds whitespace around any CJK character.""" output = [] for char in text: cp = ord(char) if self._is_chinese_char(cp): output.append(" ") output.append(char) output.append(" ") else: output.append(char) return "".join(output) def _is_chinese_char(self, cp): """Checks whether CP is the codepoint of a CJK character.""" # This defines a "chinese character" as anything in the CJK Unicode block: # https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) # # Note that the CJK Unicode block is NOT all Japanese and Korean characters, # despite its name. The modern Korean Hangul alphabet is a different block, # as is Japanese Hiragana and Katakana. 
Those alphabets are used to write # space-separated words, so they are not treated specially and handled # like the all of the other languages. if ( (cp >= 0x4E00 and cp <= 0x9FFF) or (cp >= 0x3400 and cp <= 0x4DBF) # or (cp >= 0x20000 and cp <= 0x2A6DF) # or (cp >= 0x2A700 and cp <= 0x2B73F) # or (cp >= 0x2B740 and cp <= 0x2B81F) # or (cp >= 0x2B820 and cp <= 0x2CEAF) # or (cp >= 0xF900 and cp <= 0xFAFF) or (cp >= 0x2F800 and cp <= 0x2FA1F) # ): # return True return False def _clean_text(self, text): """Performs invalid character removal and whitespace cleanup on text.""" output = [] for char in text: cp = ord(char) if cp == 0 or cp == 0xFFFD or _is_control(char): continue if _is_whitespace(char): output.append(" ") else: output.append(char) return "".join(output) # Copied from transformers.models.bert.tokenization_bert.WordpieceTokenizer class WordpieceTokenizer(object): """Runs WordPiece tokenization.""" def __init__(self, vocab, unk_token, max_input_chars_per_word=100): self.vocab = vocab self.unk_token = unk_token self.max_input_chars_per_word = max_input_chars_per_word def tokenize(self, text): """ Tokenizes a piece of text into its word pieces. This uses a greedy longest-match-first algorithm to perform tokenization using the given vocabulary. For example, `input = "unaffable"` wil return as output `["un", "##aff", "##able"]`. Args: text: A single token or whitespace separated tokens. This should have already been passed through *BasicTokenizer*. Returns: A list of wordpiece tokens. 
""" output_tokens = [] for token in whitespace_tokenize(text): chars = list(token) if len(chars) > self.max_input_chars_per_word: output_tokens.append(self.unk_token) continue is_bad = False start = 0 sub_tokens = [] while start < len(chars): end = len(chars) cur_substr = None while start < end: substr = "".join(chars[start:end]) if start > 0: substr = "##" + substr if substr in self.vocab: cur_substr = substr break end -= 1 if cur_substr is None: is_bad = True break sub_tokens.append(cur_substr) start = end if is_bad: output_tokens.append(self.unk_token) else: output_tokens.extend(sub_tokens) return output_tokens
27182812/ChatGLM-LLaMA-chinese-insturct
7,311
src/transformers/models/convbert/configuration_convbert.py
# coding=utf-8
# Copyright The HuggingFace team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" ConvBERT model configuration"""

from collections import OrderedDict
from typing import Mapping

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging


logger = logging.get_logger(__name__)

CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "YituTech/conv-bert-base": "https://huggingface.co/YituTech/conv-bert-base/resolve/main/config.json",
    "YituTech/conv-bert-medium-small": (
        "https://huggingface.co/YituTech/conv-bert-medium-small/resolve/main/config.json"
    ),
    "YituTech/conv-bert-small": "https://huggingface.co/YituTech/conv-bert-small/resolve/main/config.json",
    # See all ConvBERT models at https://huggingface.co/models?filter=convbert
}


class ConvBertConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`ConvBertModel`]. It is used to instantiate an
    ConvBERT model according to the specified arguments, defining the model architecture. Instantiating a configuration
    with the defaults will yield a similar configuration to that of the ConvBERT
    [YituTech/conv-bert-base](https://huggingface.co/YituTech/conv-bert-base) architecture.

    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.

    Args:
        vocab_size (`int`, *optional*, defaults to 30522):
            Vocabulary size of the ConvBERT model. Defines the number of different tokens that can be represented by
            the `inputs_ids` passed when calling [`ConvBertModel`] or [`TFConvBertModel`].
        hidden_size (`int`, *optional*, defaults to 768):
            Dimensionality of the encoder layers and the pooler layer.
        num_hidden_layers (`int`, *optional*, defaults to 12):
            Number of hidden layers in the Transformer encoder.
        num_attention_heads (`int`, *optional*, defaults to 12):
            Number of attention heads for each attention layer in the Transformer encoder.
        intermediate_size (`int`, *optional*, defaults to 3072):
            Dimensionality of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder.
        hidden_act (`str` or `function`, *optional*, defaults to `"gelu"`):
            The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`,
            `"relu"`, `"selu"` and `"gelu_new"` are supported.
        hidden_dropout_prob (`float`, *optional*, defaults to 0.1):
            The dropout probabilitiy for all fully connected layers in the embeddings, encoder, and pooler.
        attention_probs_dropout_prob (`float`, *optional*, defaults to 0.1):
            The dropout ratio for the attention probabilities.
        max_position_embeddings (`int`, *optional*, defaults to 512):
            The maximum sequence length that this model might ever be used with. Typically set this to something large
            just in case (e.g., 512 or 1024 or 2048).
        type_vocab_size (`int`, *optional*, defaults to 2):
            The vocabulary size of the `token_type_ids` passed when calling [`ConvBertModel`] or [`TFConvBertModel`].
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        layer_norm_eps (`float`, *optional*, defaults to 1e-12):
            The epsilon used by the layer normalization layers.
        head_ratio (`int`, *optional*, defaults to 2):
            Ratio gamma to reduce the number of attention heads.
        num_groups (`int`, *optional*, defaults to 1):
            The number of groups for grouped linear layers for ConvBert model
        conv_kernel_size (`int`, *optional*, defaults to 9):
            The size of the convolutional kernel.
        classifier_dropout (`float`, *optional*):
            The dropout ratio for the classification head.

    Example:

    ```python
    >>> from transformers import ConvBertConfig, ConvBertModel

    >>> # Initializing a ConvBERT convbert-base-uncased style configuration
    >>> configuration = ConvBertConfig()

    >>> # Initializing a model (with random weights) from the convbert-base-uncased style configuration
    >>> model = ConvBertModel(configuration)

    >>> # Accessing the model configuration
    >>> configuration = model.config
    ```"""

    model_type = "convbert"

    def __init__(
        self,
        vocab_size=30522,
        hidden_size=768,
        num_hidden_layers=12,
        num_attention_heads=12,
        intermediate_size=3072,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        max_position_embeddings=512,
        type_vocab_size=2,
        initializer_range=0.02,
        layer_norm_eps=1e-12,
        pad_token_id=1,
        bos_token_id=0,
        eos_token_id=2,
        embedding_size=768,
        head_ratio=2,
        conv_kernel_size=9,
        num_groups=1,
        classifier_dropout=None,
        **kwargs,
    ):
        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            **kwargs,
        )

        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.max_position_embeddings = max_position_embeddings
        self.type_vocab_size = type_vocab_size
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
        self.embedding_size = embedding_size
        self.head_ratio = head_ratio
        self.conv_kernel_size = conv_kernel_size
        self.num_groups = num_groups
        self.classifier_dropout = classifier_dropout


# Copied from transformers.models.bert.configuration_bert.BertOnnxConfig
class ConvBertOnnxConfig(OnnxConfig):
    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        # Multiple-choice inputs carry an extra "choice" axis between batch
        # and sequence; all other tasks use (batch, sequence).
        if self.task == "multiple-choice":
            dynamic_axis = {0: "batch", 1: "choice", 2: "sequence"}
        else:
            dynamic_axis = {0: "batch", 1: "sequence"}
        return OrderedDict(
            [
                ("input_ids", dynamic_axis),
                ("attention_mask", dynamic_axis),
                ("token_type_ids", dynamic_axis),
            ]
        )