model.ckpt.meta.json 199 KB
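The dump below is the protobuf text-format rendering of the checkpoint's MetaGraphDef; this excerpt covers the meta_info_def.stripped_op_list block, which records the OpDef of every op the saved graph uses. As a minimal sketch, assuming the metagraph was saved in the usual way as a file named "model.ckpt.meta" (a path inferred from the header above, not confirmed by the source), such a dump can be reproduced like this:

# Minimal sketch: parse a checkpoint's MetaGraphDef and print it in the
# protobuf text format shown below. Assumes TensorFlow and protobuf are
# installed and the metagraph file is "model.ckpt.meta" (assumed name).
from google.protobuf import text_format
from tensorflow.core.protobuf import meta_graph_pb2

meta_graph = meta_graph_pb2.MetaGraphDef()
with open("model.ckpt.meta", "rb") as f:
    meta_graph.ParseFromString(f.read())

# meta_info_def.stripped_op_list holds the OpDef of each op used by the
# saved graph; printing the whole message begins with that block, as in
# the dump that follows.
print(text_format.MessageToString(meta_graph))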

meta_info_def {
  stripped_op_list {
    op {
      name: "Add"
      input_arg {
        name: "x"
        type_attr: "T"
      }
      input_arg {
        name: "y"
        type_attr: "T"
      }
      output_arg {
        name: "z"
        type_attr: "T"
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_BFLOAT16
            type: DT_HALF
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_UINT8
            type: DT_INT8
            type: DT_INT16
            type: DT_INT32
            type: DT_INT64
            type: DT_COMPLEX64
            type: DT_COMPLEX128
            type: DT_STRING
          }
        }
      }
    }
    op {
      name: "AddN"
      input_arg {
        name: "inputs"
        type_attr: "T"
        number_attr: "N"
      }
      output_arg {
        name: "sum"
        type_attr: "T"
      }
      attr {
        name: "N"
        type: "int"
        has_minimum: true
        minimum: 1
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_INT32
            type: DT_UINT8
            type: DT_INT16
            type: DT_INT8
            type: DT_COMPLEX64
            type: DT_INT64
            type: DT_QINT8
            type: DT_QUINT8
            type: DT_QINT32
            type: DT_BFLOAT16
            type: DT_UINT16
            type: DT_COMPLEX128
            type: DT_HALF
            type: DT_UINT32
            type: DT_UINT64
            type: DT_VARIANT
          }
        }
      }
      is_aggregate: true
      is_commutative: true
    }
    op {
      name: "ApplyGradientDescent"
      input_arg {
        name: "var"
        type_attr: "T"
        is_ref: true
      }
      input_arg {
        name: "alpha"
        type_attr: "T"
      }
      input_arg {
        name: "delta"
        type_attr: "T"
      }
      output_arg {
        name: "out"
        type_attr: "T"
        is_ref: true
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_INT32
            type: DT_UINT8
            type: DT_INT16
            type: DT_INT8
            type: DT_COMPLEX64
            type: DT_INT64
            type: DT_QINT8
            type: DT_QUINT8
            type: DT_QINT32
            type: DT_BFLOAT16
            type: DT_UINT16
            type: DT_COMPLEX128
            type: DT_HALF
            type: DT_UINT32
            type: DT_UINT64
          }
        }
      }
      attr {
        name: "use_locking"
        type: "bool"
        default_value {
          b: false
        }
      }
    }
    op {
      name: "ArgMax"
      input_arg {
        name: "input"
        type_attr: "T"
      }
      input_arg {
        name: "dimension"
        type_attr: "Tidx"
      }
      output_arg {
        name: "output"
        type_attr: "output_type"
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_INT32
            type: DT_UINT8
            type: DT_INT16
            type: DT_INT8
            type: DT_COMPLEX64
            type: DT_INT64
            type: DT_QINT8
            type: DT_QUINT8
            type: DT_QINT32
            type: DT_BFLOAT16
            type: DT_UINT16
            type: DT_COMPLEX128
            type: DT_HALF
            type: DT_UINT32
            type: DT_UINT64
          }
        }
      }
      attr {
        name: "Tidx"
        type: "type"
        default_value {
          type: DT_INT32
        }
        allowed_values {
          list {
            type: DT_INT32
            type: DT_INT64
          }
        }
      }
      attr {
        name: "output_type"
        type: "type"
        default_value {
          type: DT_INT64
        }
        allowed_values {
          list {
            type: DT_INT32
            type: DT_INT64
          }
        }
      }
    }
    op {
      name: "Assign"
      input_arg {
        name: "ref"
        type_attr: "T"
        is_ref: true
      }
      input_arg {
        name: "value"
        type_attr: "T"
      }
      output_arg {
        name: "output_ref"
        type_attr: "T"
        is_ref: true
      }
      attr {
        name: "T"
        type: "type"
      }
      attr {
        name: "validate_shape"
        type: "bool"
        default_value {
          b: true
        }
      }
      attr {
        name: "use_locking"
        type: "bool"
        default_value {
          b: true
        }
      }
      allows_uninitialized_input: true
    }
    op {
      name: "AssignAdd"
      input_arg {
        name: "ref"
        type_attr: "T"
        is_ref: true
      }
      input_arg {
        name: "value"
        type_attr: "T"
      }
      output_arg {
        name: "output_ref"
        type_attr: "T"
        is_ref: true
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_INT32
            type: DT_UINT8
            type: DT_INT16
            type: DT_INT8
            type: DT_COMPLEX64
            type: DT_INT64
            type: DT_QINT8
            type: DT_QUINT8
            type: DT_QINT32
            type: DT_BFLOAT16
            type: DT_UINT16
            type: DT_COMPLEX128
            type: DT_HALF
            type: DT_UINT32
            type: DT_UINT64
          }
        }
      }
      attr {
        name: "use_locking"
        type: "bool"
        default_value {
          b: false
        }
      }
    }
    op {
      name: "AssignSub"
      input_arg {
        name: "ref"
        type_attr: "T"
        is_ref: true
      }
      input_arg {
        name: "value"
        type_attr: "T"
      }
      output_arg {
        name: "output_ref"
        type_attr: "T"
        is_ref: true
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_INT32
            type: DT_UINT8
            type: DT_INT16
            type: DT_INT8
            type: DT_COMPLEX64
            type: DT_INT64
            type: DT_QINT8
            type: DT_QUINT8
            type: DT_QINT32
            type: DT_BFLOAT16
            type: DT_UINT16
            type: DT_COMPLEX128
            type: DT_HALF
            type: DT_UINT32
            type: DT_UINT64
          }
        }
      }
      attr {
        name: "use_locking"
        type: "bool"
        default_value {
          b: false
        }
      }
    }
    op {
      name: "BroadcastGradientArgs"
      input_arg {
        name: "s0"
        type_attr: "T"
      }
      input_arg {
        name: "s1"
        type_attr: "T"
      }
      output_arg {
        name: "r0"
        type_attr: "T"
      }
      output_arg {
        name: "r1"
        type_attr: "T"
      }
      attr {
        name: "T"
        type: "type"
        default_value {
          type: DT_INT32
        }
        allowed_values {
          list {
            type: DT_INT32
            type: DT_INT64
          }
        }
      }
    }
    op {
      name: "Cast"
      input_arg {
        name: "x"
        type_attr: "SrcT"
      }
      output_arg {
        name: "y"
        type_attr: "DstT"
      }
      attr {
        name: "SrcT"
        type: "type"
      }
      attr {
        name: "DstT"
        type: "type"
      }
    }
    op {
      name: "Const"
      output_arg {
        name: "output"
        type_attr: "dtype"
      }
      attr {
        name: "value"
        type: "tensor"
      }
      attr {
        name: "dtype"
        type: "type"
      }
    }
    op {
      name: "Equal"
      input_arg {
        name: "x"
        type_attr: "T"
      }
      input_arg {
        name: "y"
        type_attr: "T"
      }
      output_arg {
        name: "z"
        type: DT_BOOL
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_BFLOAT16
            type: DT_HALF
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_UINT8
            type: DT_INT8
            type: DT_INT16
            type: DT_INT32
            type: DT_INT64
            type: DT_COMPLEX64
            type: DT_QUINT8
            type: DT_QINT8
            type: DT_QINT32
            type: DT_STRING
            type: DT_BOOL
            type: DT_COMPLEX128
          }
        }
      }
      is_commutative: true
    }
    op {
      name: "ExpandDims"
      input_arg {
        name: "input"
        type_attr: "T"
      }
      input_arg {
        name: "dim"
        type_attr: "Tdim"
      }
      output_arg {
        name: "output"
        type_attr: "T"
      }
      attr {
        name: "T"
        type: "type"
      }
      attr {
        name: "Tdim"
        type: "type"
        default_value {
          type: DT_INT32
        }
        allowed_values {
          list {
            type: DT_INT32
            type: DT_INT64
          }
        }
      }
    }
    op {
      name: "Fill"
      input_arg {
        name: "dims"
        type_attr: "index_type"
      }
      input_arg {
        name: "value"
        type_attr: "T"
      }
      output_arg {
        name: "output"
        type_attr: "T"
      }
      attr {
        name: "T"
        type: "type"
      }
      attr {
        name: "index_type"
        type: "type"
        default_value {
          type: DT_INT32
        }
        allowed_values {
          list {
            type: DT_INT32
            type: DT_INT64
          }
        }
      }
    }
    op {
      name: "FloorDiv"
      input_arg {
        name: "x"
        type_attr: "T"
      }
      input_arg {
        name: "y"
        type_attr: "T"
      }
      output_arg {
        name: "z"
        type_attr: "T"
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_BFLOAT16
            type: DT_HALF
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_UINT8
            type: DT_INT8
            type: DT_UINT16
            type: DT_INT16
            type: DT_INT32
            type: DT_INT64
            type: DT_COMPLEX64
            type: DT_COMPLEX128
          }
        }
      }
    }
    op {
      name: "Identity"
      input_arg {
        name: "input"
        type_attr: "T"
      }
      output_arg {
        name: "output"
        type_attr: "T"
      }
      attr {
        name: "T"
        type: "type"
      }
    }
    op {
      name: "IsVariableInitialized"
      input_arg {
        name: "ref"
        type_attr: "dtype"
        is_ref: true
      }
      output_arg {
        name: "is_initialized"
        type: DT_BOOL
      }
      attr {
        name: "dtype"
        type: "type"
      }
      allows_uninitialized_input: true
    }
    op {
      name: "L2Loss"
      input_arg {
        name: "t"
        type_attr: "T"
      }
      output_arg {
        name: "output"
        type_attr: "T"
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_HALF
            type: DT_BFLOAT16
            type: DT_FLOAT
            type: DT_DOUBLE
          }
        }
      }
    }
    op {
      name: "MatMul"
      input_arg {
        name: "a"
        type_attr: "T"
      }
      input_arg {
        name: "b"
        type_attr: "T"
      }
      output_arg {
        name: "product"
        type_attr: "T"
      }
      attr {
        name: "transpose_a"
        type: "bool"
        default_value {
          b: false
        }
      }
      attr {
        name: "transpose_b"
        type: "bool"
        default_value {
          b: false
        }
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_BFLOAT16
            type: DT_HALF
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_INT32
            type: DT_COMPLEX64
            type: DT_COMPLEX128
          }
        }
      }
    }
    op {
      name: "Maximum"
      input_arg {
        name: "x"
        type_attr: "T"
      }
      input_arg {
        name: "y"
        type_attr: "T"
      }
      output_arg {
        name: "z"
        type_attr: "T"
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_BFLOAT16
            type: DT_HALF
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_INT32
            type: DT_INT64
          }
        }
      }
      is_commutative: true
    }
    op {
      name: "Mean"
      input_arg {
        name: "input"
        type_attr: "T"
      }
      input_arg {
        name: "reduction_indices"
        type_attr: "Tidx"
      }
      output_arg {
        name: "output"
        type_attr: "T"
      }
      attr {
        name: "keep_dims"
        type: "bool"
        default_value {
          b: false
        }
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_INT32
            type: DT_UINT8
            type: DT_INT16
            type: DT_INT8
            type: DT_COMPLEX64
            type: DT_INT64
            type: DT_QINT8
            type: DT_QUINT8
            type: DT_QINT32
            type: DT_BFLOAT16
            type: DT_UINT16
            type: DT_COMPLEX128
            type: DT_HALF
            type: DT_UINT32
            type: DT_UINT64
          }
        }
      }
      attr {
        name: "Tidx"
        type: "type"
        default_value {
          type: DT_INT32
        }
        allowed_values {
          list {
            type: DT_INT32
            type: DT_INT64
          }
        }
      }
    }
    op {
      name: "Merge"
      input_arg {
        name: "inputs"
        type_attr: "T"
        number_attr: "N"
      }
      output_arg {
        name: "output"
        type_attr: "T"
      }
      output_arg {
        name: "value_index"
        type: DT_INT32
      }
      attr {
        name: "T"
        type: "type"
      }
      attr {
        name: "N"
        type: "int"
        has_minimum: true
        minimum: 1
      }
    }
    op {
      name: "Minimum"
      input_arg {
        name: "x"
        type_attr: "T"
      }
      input_arg {
        name: "y"
        type_attr: "T"
      }
      output_arg {
        name: "z"
        type_attr: "T"
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_BFLOAT16
            type: DT_HALF
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_INT32
            type: DT_INT64
          }
        }
      }
      is_commutative: true
    }
    op {
      name: "Mul"
      input_arg {
        name: "x"
        type_attr: "T"
      }
      input_arg {
        name: "y"
        type_attr: "T"
      }
      output_arg {
        name: "z"
        type_attr: "T"
      }
      attr {
        name: "T"
        type: "type"
        allowed_values {
          list {
            type: DT_BFLOAT16
            type: DT_HALF
            type: DT_FLOAT
            type: DT_DOUBLE
            type: DT_UINT8
            type: DT_INT8
            type: DT_UINT16
            type: DT_INT16
            type: DT_INT32
            type: DT_INT64
            type: DT_COMPLEX64
            type: DT_COMPLEX128
          }
        }
      }
      is_commutative: true
    }
    op {
      name: "NoOp"
  825. }
  826. op {
  827. name: "Placeholder"
  828. output_arg {
  829. name: "output"
  830. type_attr: "dtype"
  831. }
  832. attr {
  833. name: "dtype"
  834. type: "type"
  835. }
  836. attr {
  837. name: "shape"
  838. type: "shape"
  839. default_value {
  840. shape {
  841. unknown_rank: true
  842. }
  843. }
  844. }
  845. }
  846. op {
  847. name: "Pow"
  848. input_arg {
  849. name: "x"
  850. type_attr: "T"
  851. }
  852. input_arg {
  853. name: "y"
  854. type_attr: "T"
  855. }
  856. output_arg {
  857. name: "z"
  858. type_attr: "T"
  859. }
  860. attr {
  861. name: "T"
  862. type: "type"
  863. allowed_values {
  864. list {
  865. type: DT_BFLOAT16
  866. type: DT_FLOAT
  867. type: DT_HALF
  868. type: DT_DOUBLE
  869. type: DT_INT32
  870. type: DT_INT64
  871. type: DT_COMPLEX64
  872. type: DT_COMPLEX128
  873. }
  874. }
  875. }
  876. }
  877. op {
  878. name: "PreventGradient"
  879. input_arg {
  880. name: "input"
  881. type_attr: "T"
  882. }
  883. output_arg {
  884. name: "output"
  885. type_attr: "T"
  886. }
  887. attr {
  888. name: "T"
  889. type: "type"
  890. }
  891. attr {
  892. name: "message"
  893. type: "string"
  894. default_value {
  895. s: ""
  896. }
  897. }
  898. }
  899. op {
  900. name: "Prod"
  901. input_arg {
  902. name: "input"
  903. type_attr: "T"
  904. }
  905. input_arg {
  906. name: "reduction_indices"
  907. type_attr: "Tidx"
  908. }
  909. output_arg {
  910. name: "output"
  911. type_attr: "T"
  912. }
  913. attr {
  914. name: "keep_dims"
  915. type: "bool"
  916. default_value {
  917. b: false
  918. }
  919. }
  920. attr {
  921. name: "T"
  922. type: "type"
  923. allowed_values {
  924. list {
  925. type: DT_FLOAT
  926. type: DT_DOUBLE
  927. type: DT_INT32
  928. type: DT_UINT8
  929. type: DT_INT16
  930. type: DT_INT8
  931. type: DT_COMPLEX64
  932. type: DT_INT64
  933. type: DT_QINT8
  934. type: DT_QUINT8
  935. type: DT_QINT32
  936. type: DT_BFLOAT16
  937. type: DT_UINT16
  938. type: DT_COMPLEX128
  939. type: DT_HALF
  940. type: DT_UINT32
  941. type: DT_UINT64
  942. }
  943. }
  944. }
  945. attr {
  946. name: "Tidx"
  947. type: "type"
  948. default_value {
  949. type: DT_INT32
  950. }
  951. allowed_values {
  952. list {
  953. type: DT_INT32
  954. type: DT_INT64
  955. }
  956. }
  957. }
  958. }
  959. op {
  960. name: "RealDiv"
  961. input_arg {
  962. name: "x"
  963. type_attr: "T"
  964. }
  965. input_arg {
  966. name: "y"
  967. type_attr: "T"
  968. }
  969. output_arg {
  970. name: "z"
  971. type_attr: "T"
  972. }
  973. attr {
  974. name: "T"
  975. type: "type"
  976. allowed_values {
  977. list {
  978. type: DT_BFLOAT16
  979. type: DT_HALF
  980. type: DT_FLOAT
  981. type: DT_DOUBLE
  982. type: DT_UINT8
  983. type: DT_INT8
  984. type: DT_UINT16
  985. type: DT_INT16
  986. type: DT_INT32
  987. type: DT_INT64
  988. type: DT_COMPLEX64
  989. type: DT_COMPLEX128
  990. }
  991. }
  992. }
  993. }
  994. op {
  995. name: "RefSwitch"
  996. input_arg {
  997. name: "data"
  998. type_attr: "T"
  999. is_ref: true
  1000. }
  1001. input_arg {
  1002. name: "pred"
  1003. type: DT_BOOL
  1004. }
  1005. output_arg {
  1006. name: "output_false"
  1007. type_attr: "T"
  1008. is_ref: true
  1009. }
  1010. output_arg {
  1011. name: "output_true"
  1012. type_attr: "T"
  1013. is_ref: true
  1014. }
  1015. attr {
  1016. name: "T"
  1017. type: "type"
  1018. }
  1019. allows_uninitialized_input: true
  1020. }
  1021. op {
  1022. name: "Relu"
  1023. input_arg {
  1024. name: "features"
  1025. type_attr: "T"
  1026. }
  1027. output_arg {
  1028. name: "activations"
  1029. type_attr: "T"
  1030. }
  1031. attr {
  1032. name: "T"
  1033. type: "type"
  1034. allowed_values {
  1035. list {
  1036. type: DT_FLOAT
  1037. type: DT_DOUBLE
  1038. type: DT_INT32
  1039. type: DT_UINT8
  1040. type: DT_INT16
  1041. type: DT_INT8
  1042. type: DT_INT64
  1043. type: DT_BFLOAT16
  1044. type: DT_UINT16
  1045. type: DT_HALF
  1046. type: DT_UINT32
  1047. type: DT_UINT64
  1048. }
  1049. }
  1050. }
  1051. }
  1052. op {
  1053. name: "ReluGrad"
  1054. input_arg {
  1055. name: "gradients"
  1056. type_attr: "T"
  1057. }
  1058. input_arg {
  1059. name: "features"
  1060. type_attr: "T"
  1061. }
  1062. output_arg {
  1063. name: "backprops"
  1064. type_attr: "T"
  1065. }
  1066. attr {
  1067. name: "T"
  1068. type: "type"
  1069. allowed_values {
  1070. list {
  1071. type: DT_FLOAT
  1072. type: DT_DOUBLE
  1073. type: DT_INT32
  1074. type: DT_UINT8
  1075. type: DT_INT16
  1076. type: DT_INT8
  1077. type: DT_INT64
  1078. type: DT_BFLOAT16
  1079. type: DT_UINT16
  1080. type: DT_HALF
  1081. type: DT_UINT32
  1082. type: DT_UINT64
  1083. }
  1084. }
  1085. }
  1086. }
  1087. op {
  1088. name: "Reshape"
  1089. input_arg {
  1090. name: "tensor"
  1091. type_attr: "T"
  1092. }
  1093. input_arg {
  1094. name: "shape"
  1095. type_attr: "Tshape"
  1096. }
  1097. output_arg {
  1098. name: "output"
  1099. type_attr: "T"
  1100. }
  1101. attr {
  1102. name: "T"
  1103. type: "type"
  1104. }
  1105. attr {
  1106. name: "Tshape"
  1107. type: "type"
  1108. default_value {
  1109. type: DT_INT32
  1110. }
  1111. allowed_values {
  1112. list {
  1113. type: DT_INT32
  1114. type: DT_INT64
  1115. }
  1116. }
  1117. }
  1118. }
  1119. op {
  1120. name: "RestoreV2"
  1121. input_arg {
  1122. name: "prefix"
  1123. type: DT_STRING
  1124. }
  1125. input_arg {
  1126. name: "tensor_names"
  1127. type: DT_STRING
  1128. }
  1129. input_arg {
  1130. name: "shape_and_slices"
  1131. type: DT_STRING
  1132. }
  1133. output_arg {
  1134. name: "tensors"
  1135. type_list_attr: "dtypes"
  1136. }
  1137. attr {
  1138. name: "dtypes"
  1139. type: "list(type)"
  1140. has_minimum: true
  1141. minimum: 1
  1142. }
  1143. is_stateful: true
  1144. }
  1145. op {
  1146. name: "SaveV2"
  1147. input_arg {
  1148. name: "prefix"
  1149. type: DT_STRING
  1150. }
  1151. input_arg {
  1152. name: "tensor_names"
  1153. type: DT_STRING
  1154. }
  1155. input_arg {
  1156. name: "shape_and_slices"
  1157. type: DT_STRING
  1158. }
  1159. input_arg {
  1160. name: "tensors"
  1161. type_list_attr: "dtypes"
  1162. }
  1163. attr {
  1164. name: "dtypes"
  1165. type: "list(type)"
  1166. has_minimum: true
  1167. minimum: 1
  1168. }
  1169. is_stateful: true
  1170. }
  1171. op {
  1172. name: "Shape"
  1173. input_arg {
  1174. name: "input"
  1175. type_attr: "T"
  1176. }
  1177. output_arg {
  1178. name: "output"
  1179. type_attr: "out_type"
  1180. }
  1181. attr {
  1182. name: "T"
  1183. type: "type"
  1184. }
  1185. attr {
  1186. name: "out_type"
  1187. type: "type"
  1188. default_value {
  1189. type: DT_INT32
  1190. }
  1191. allowed_values {
  1192. list {
  1193. type: DT_INT32
  1194. type: DT_INT64
  1195. }
  1196. }
  1197. }
  1198. }
  1199. op {
  1200. name: "SparseSoftmaxCrossEntropyWithLogits"
  1201. input_arg {
  1202. name: "features"
  1203. type_attr: "T"
  1204. }
  1205. input_arg {
  1206. name: "labels"
  1207. type_attr: "Tlabels"
  1208. }
  1209. output_arg {
  1210. name: "loss"
  1211. type_attr: "T"
  1212. }
  1213. output_arg {
  1214. name: "backprop"
  1215. type_attr: "T"
  1216. }
  1217. attr {
  1218. name: "T"
  1219. type: "type"
  1220. allowed_values {
  1221. list {
  1222. type: DT_HALF
  1223. type: DT_BFLOAT16
  1224. type: DT_FLOAT
  1225. type: DT_DOUBLE
  1226. }
  1227. }
  1228. }
  1229. attr {
  1230. name: "Tlabels"
  1231. type: "type"
  1232. default_value {
  1233. type: DT_INT64
  1234. }
  1235. allowed_values {
  1236. list {
  1237. type: DT_INT32
  1238. type: DT_INT64
  1239. }
  1240. }
  1241. }
  1242. }
  1243. op {
  1244. name: "Sub"
  1245. input_arg {
  1246. name: "x"
  1247. type_attr: "T"
  1248. }
  1249. input_arg {
  1250. name: "y"
  1251. type_attr: "T"
  1252. }
  1253. output_arg {
  1254. name: "z"
  1255. type_attr: "T"
  1256. }
  1257. attr {
  1258. name: "T"
  1259. type: "type"
  1260. allowed_values {
  1261. list {
  1262. type: DT_BFLOAT16
  1263. type: DT_HALF
  1264. type: DT_FLOAT
  1265. type: DT_DOUBLE
  1266. type: DT_UINT8
  1267. type: DT_INT8
  1268. type: DT_UINT16
  1269. type: DT_INT16
  1270. type: DT_INT32
  1271. type: DT_INT64
  1272. type: DT_COMPLEX64
  1273. type: DT_COMPLEX128
  1274. }
  1275. }
  1276. }
  1277. }
  1278. op {
  1279. name: "Sum"
  1280. input_arg {
  1281. name: "input"
  1282. type_attr: "T"
  1283. }
  1284. input_arg {
  1285. name: "reduction_indices"
  1286. type_attr: "Tidx"
  1287. }
  1288. output_arg {
  1289. name: "output"
  1290. type_attr: "T"
  1291. }
  1292. attr {
  1293. name: "keep_dims"
  1294. type: "bool"
  1295. default_value {
  1296. b: false
  1297. }
  1298. }
  1299. attr {
  1300. name: "T"
  1301. type: "type"
  1302. allowed_values {
  1303. list {
  1304. type: DT_FLOAT
  1305. type: DT_DOUBLE
  1306. type: DT_INT32
  1307. type: DT_UINT8
  1308. type: DT_INT16
  1309. type: DT_INT8
  1310. type: DT_COMPLEX64
  1311. type: DT_INT64
  1312. type: DT_QINT8
  1313. type: DT_QUINT8
  1314. type: DT_QINT32
  1315. type: DT_BFLOAT16
  1316. type: DT_UINT16
  1317. type: DT_COMPLEX128
  1318. type: DT_HALF
  1319. type: DT_UINT32
  1320. type: DT_UINT64
  1321. }
  1322. }
  1323. }
  1324. attr {
  1325. name: "Tidx"
  1326. type: "type"
  1327. default_value {
  1328. type: DT_INT32
  1329. }
  1330. allowed_values {
  1331. list {
  1332. type: DT_INT32
  1333. type: DT_INT64
  1334. }
  1335. }
  1336. }
  1337. }
  1338. op {
  1339. name: "Switch"
  1340. input_arg {
  1341. name: "data"
  1342. type_attr: "T"
  1343. }
  1344. input_arg {
  1345. name: "pred"
  1346. type: DT_BOOL
  1347. }
  1348. output_arg {
  1349. name: "output_false"
  1350. type_attr: "T"
  1351. }
  1352. output_arg {
  1353. name: "output_true"
  1354. type_attr: "T"
  1355. }
  1356. attr {
  1357. name: "T"
  1358. type: "type"
  1359. }
  1360. }
  1361. op {
  1362. name: "Tile"
  1363. input_arg {
  1364. name: "input"
  1365. type_attr: "T"
  1366. }
  1367. input_arg {
  1368. name: "multiples"
  1369. type_attr: "Tmultiples"
  1370. }
  1371. output_arg {
  1372. name: "output"
  1373. type_attr: "T"
  1374. }
  1375. attr {
  1376. name: "T"
  1377. type: "type"
  1378. }
  1379. attr {
  1380. name: "Tmultiples"
  1381. type: "type"
  1382. default_value {
  1383. type: DT_INT32
  1384. }
  1385. allowed_values {
  1386. list {
  1387. type: DT_INT32
  1388. type: DT_INT64
  1389. }
  1390. }
  1391. }
  1392. }
  1393. op {
  1394. name: "TruncatedNormal"
  1395. input_arg {
  1396. name: "shape"
  1397. type_attr: "T"
  1398. }
  1399. output_arg {
  1400. name: "output"
  1401. type_attr: "dtype"
  1402. }
  1403. attr {
  1404. name: "seed"
  1405. type: "int"
  1406. default_value {
  1407. i: 0
  1408. }
  1409. }
  1410. attr {
  1411. name: "seed2"
  1412. type: "int"
  1413. default_value {
  1414. i: 0
  1415. }
  1416. }
  1417. attr {
  1418. name: "dtype"
  1419. type: "type"
  1420. allowed_values {
  1421. list {
  1422. type: DT_HALF
  1423. type: DT_BFLOAT16
  1424. type: DT_FLOAT
  1425. type: DT_DOUBLE
  1426. }
  1427. }
  1428. }
  1429. attr {
  1430. name: "T"
  1431. type: "type"
  1432. allowed_values {
  1433. list {
  1434. type: DT_INT32
  1435. type: DT_INT64
  1436. }
  1437. }
  1438. }
  1439. is_stateful: true
  1440. }
  1441. op {
  1442. name: "VariableV2"
  1443. output_arg {
  1444. name: "ref"
  1445. type_attr: "dtype"
  1446. is_ref: true
  1447. }
  1448. attr {
  1449. name: "shape"
  1450. type: "shape"
  1451. }
  1452. attr {
  1453. name: "dtype"
  1454. type: "type"
  1455. }
  1456. attr {
  1457. name: "container"
  1458. type: "string"
  1459. default_value {
  1460. s: ""
  1461. }
  1462. }
  1463. attr {
  1464. name: "shared_name"
  1465. type: "string"
  1466. default_value {
  1467. s: ""
  1468. }
  1469. }
  1470. is_stateful: true
  1471. }
  1472. op {
  1473. name: "ZerosLike"
  1474. input_arg {
  1475. name: "x"
  1476. type_attr: "T"
  1477. }
  1478. output_arg {
  1479. name: "y"
  1480. type_attr: "T"
  1481. }
  1482. attr {
  1483. name: "T"
  1484. type: "type"
  1485. }
  1486. }
  1487. }
  1488. tensorflow_version: "1.8.0"
  1489. tensorflow_git_version: "v1.8.0-0-g93bc2e2072"
  1490. }
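# End of the op-schema and version block (TensorFlow 1.8.0). The graph_def below is the
# serialized computation graph: a small fully connected network (784 -> 500 -> 10) with
# variable initializers, a duplicated forward pass, and exponential-moving-average
# bookkeeping, annotated in the comments that follow.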
  1491. graph_def {
  1492. node {
  1493. name: "x-input"
  1494. op: "Placeholder"
  1495. attr {
  1496. key: "_output_shapes"
  1497. value {
  1498. list {
  1499. shape {
  1500. dim {
  1501. size: -1
  1502. }
  1503. dim {
  1504. size: 784
  1505. }
  1506. }
  1507. }
  1508. }
  1509. }
  1510. attr {
  1511. key: "dtype"
  1512. value {
  1513. type: DT_FLOAT
  1514. }
  1515. }
  1516. attr {
  1517. key: "shape"
  1518. value {
  1519. shape {
  1520. dim {
  1521. size: -1
  1522. }
  1523. dim {
  1524. size: 784
  1525. }
  1526. }
  1527. }
  1528. }
  1529. }
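# x-input: a float32 Placeholder of shape [?, 784], i.e. a batch of flattened 784-element
# feature vectors (consistent with 28x28 MNIST images, although the data source is not
# recorded in this file). Roughly: x = tf.placeholder(tf.float32, [None, 784], name='x-input').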
  1530. node {
  1531. name: "layer1/weights/Initializer/truncated_normal/shape"
  1532. op: "Const"
  1533. attr {
  1534. key: "_class"
  1535. value {
  1536. list {
  1537. s: "loc:@layer1/weights"
  1538. }
  1539. }
  1540. }
  1541. attr {
  1542. key: "_output_shapes"
  1543. value {
  1544. list {
  1545. shape {
  1546. dim {
  1547. size: 2
  1548. }
  1549. }
  1550. }
  1551. }
  1552. }
  1553. attr {
  1554. key: "dtype"
  1555. value {
  1556. type: DT_INT32
  1557. }
  1558. }
  1559. attr {
  1560. key: "value"
  1561. value {
  1562. tensor {
  1563. dtype: DT_INT32
  1564. tensor_shape {
  1565. dim {
  1566. size: 2
  1567. }
  1568. }
  1569. tensor_content: "\020\003\000\000\364\001\000\000"
  1570. }
  1571. }
  1572. }
  1573. }
  1574. node {
  1575. name: "layer1/weights/Initializer/truncated_normal/mean"
  1576. op: "Const"
  1577. attr {
  1578. key: "_class"
  1579. value {
  1580. list {
  1581. s: "loc:@layer1/weights"
  1582. }
  1583. }
  1584. }
  1585. attr {
  1586. key: "_output_shapes"
  1587. value {
  1588. list {
  1589. shape {
  1590. }
  1591. }
  1592. }
  1593. }
  1594. attr {
  1595. key: "dtype"
  1596. value {
  1597. type: DT_FLOAT
  1598. }
  1599. }
  1600. attr {
  1601. key: "value"
  1602. value {
  1603. tensor {
  1604. dtype: DT_FLOAT
  1605. tensor_shape {
  1606. }
  1607. float_val: 0.0
  1608. }
  1609. }
  1610. }
  1611. }
  1612. node {
  1613. name: "layer1/weights/Initializer/truncated_normal/stddev"
  1614. op: "Const"
  1615. attr {
  1616. key: "_class"
  1617. value {
  1618. list {
  1619. s: "loc:@layer1/weights"
  1620. }
  1621. }
  1622. }
  1623. attr {
  1624. key: "_output_shapes"
  1625. value {
  1626. list {
  1627. shape {
  1628. }
  1629. }
  1630. }
  1631. }
  1632. attr {
  1633. key: "dtype"
  1634. value {
  1635. type: DT_FLOAT
  1636. }
  1637. }
  1638. attr {
  1639. key: "value"
  1640. value {
  1641. tensor {
  1642. dtype: DT_FLOAT
  1643. tensor_shape {
  1644. }
  1645. float_val: 0.10000000149
  1646. }
  1647. }
  1648. }
  1649. }
  1650. node {
  1651. name: "layer1/weights/Initializer/truncated_normal/TruncatedNormal"
  1652. op: "TruncatedNormal"
  1653. input: "layer1/weights/Initializer/truncated_normal/shape"
  1654. attr {
  1655. key: "T"
  1656. value {
  1657. type: DT_INT32
  1658. }
  1659. }
  1660. attr {
  1661. key: "_class"
  1662. value {
  1663. list {
  1664. s: "loc:@layer1/weights"
  1665. }
  1666. }
  1667. }
  1668. attr {
  1669. key: "_output_shapes"
  1670. value {
  1671. list {
  1672. shape {
  1673. dim {
  1674. size: 784
  1675. }
  1676. dim {
  1677. size: 500
  1678. }
  1679. }
  1680. }
  1681. }
  1682. }
  1683. attr {
  1684. key: "dtype"
  1685. value {
  1686. type: DT_FLOAT
  1687. }
  1688. }
  1689. attr {
  1690. key: "seed"
  1691. value {
  1692. i: 0
  1693. }
  1694. }
  1695. attr {
  1696. key: "seed2"
  1697. value {
  1698. i: 0
  1699. }
  1700. }
  1701. }
  1702. node {
  1703. name: "layer1/weights/Initializer/truncated_normal/mul"
  1704. op: "Mul"
  1705. input: "layer1/weights/Initializer/truncated_normal/TruncatedNormal"
  1706. input: "layer1/weights/Initializer/truncated_normal/stddev"
  1707. attr {
  1708. key: "T"
  1709. value {
  1710. type: DT_FLOAT
  1711. }
  1712. }
  1713. attr {
  1714. key: "_class"
  1715. value {
  1716. list {
  1717. s: "loc:@layer1/weights"
  1718. }
  1719. }
  1720. }
  1721. attr {
  1722. key: "_output_shapes"
  1723. value {
  1724. list {
  1725. shape {
  1726. dim {
  1727. size: 784
  1728. }
  1729. dim {
  1730. size: 500
  1731. }
  1732. }
  1733. }
  1734. }
  1735. }
  1736. }
  1737. node {
  1738. name: "layer1/weights/Initializer/truncated_normal"
  1739. op: "Add"
  1740. input: "layer1/weights/Initializer/truncated_normal/mul"
  1741. input: "layer1/weights/Initializer/truncated_normal/mean"
  1742. attr {
  1743. key: "T"
  1744. value {
  1745. type: DT_FLOAT
  1746. }
  1747. }
  1748. attr {
  1749. key: "_class"
  1750. value {
  1751. list {
  1752. s: "loc:@layer1/weights"
  1753. }
  1754. }
  1755. }
  1756. attr {
  1757. key: "_output_shapes"
  1758. value {
  1759. list {
  1760. shape {
  1761. dim {
  1762. size: 784
  1763. }
  1764. dim {
  1765. size: 500
  1766. }
  1767. }
  1768. }
  1769. }
  1770. }
  1771. }
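# The six nodes above (shape, mean, stddev, TruncatedNormal, mul, Add) form the initializer
# for layer1/weights: draw a [784, 500] truncated-normal sample, scale it by stddev 0.1, and
# add mean 0.0. The shape constant's tensor_content decodes to the little-endian int32 pair
# [784, 500]. A hypothetical reconstruction of the producing TF 1.x code:
#   init = tf.truncated_normal([784, 500], mean=0.0, stddev=0.1)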
  1772. node {
  1773. name: "layer1/weights"
  1774. op: "VariableV2"
  1775. attr {
  1776. key: "_class"
  1777. value {
  1778. list {
  1779. s: "loc:@layer1/weights"
  1780. }
  1781. }
  1782. }
  1783. attr {
  1784. key: "_output_shapes"
  1785. value {
  1786. list {
  1787. shape {
  1788. dim {
  1789. size: 784
  1790. }
  1791. dim {
  1792. size: 500
  1793. }
  1794. }
  1795. }
  1796. }
  1797. }
  1798. attr {
  1799. key: "container"
  1800. value {
  1801. s: ""
  1802. }
  1803. }
  1804. attr {
  1805. key: "dtype"
  1806. value {
  1807. type: DT_FLOAT
  1808. }
  1809. }
  1810. attr {
  1811. key: "shape"
  1812. value {
  1813. shape {
  1814. dim {
  1815. size: 784
  1816. }
  1817. dim {
  1818. size: 500
  1819. }
  1820. }
  1821. }
  1822. }
  1823. attr {
  1824. key: "shared_name"
  1825. value {
  1826. s: ""
  1827. }
  1828. }
  1829. }
  1830. node {
  1831. name: "layer1/weights/Assign"
  1832. op: "Assign"
  1833. input: "layer1/weights"
  1834. input: "layer1/weights/Initializer/truncated_normal"
  1835. attr {
  1836. key: "T"
  1837. value {
  1838. type: DT_FLOAT
  1839. }
  1840. }
  1841. attr {
  1842. key: "_class"
  1843. value {
  1844. list {
  1845. s: "loc:@layer1/weights"
  1846. }
  1847. }
  1848. }
  1849. attr {
  1850. key: "_output_shapes"
  1851. value {
  1852. list {
  1853. shape {
  1854. dim {
  1855. size: 784
  1856. }
  1857. dim {
  1858. size: 500
  1859. }
  1860. }
  1861. }
  1862. }
  1863. }
  1864. attr {
  1865. key: "use_locking"
  1866. value {
  1867. b: true
  1868. }
  1869. }
  1870. attr {
  1871. key: "validate_shape"
  1872. value {
  1873. b: true
  1874. }
  1875. }
  1876. }
  1877. node {
  1878. name: "layer1/weights/read"
  1879. op: "Identity"
  1880. input: "layer1/weights"
  1881. attr {
  1882. key: "T"
  1883. value {
  1884. type: DT_FLOAT
  1885. }
  1886. }
  1887. attr {
  1888. key: "_class"
  1889. value {
  1890. list {
  1891. s: "loc:@layer1/weights"
  1892. }
  1893. }
  1894. }
  1895. attr {
  1896. key: "_output_shapes"
  1897. value {
  1898. list {
  1899. shape {
  1900. dim {
  1901. size: 784
  1902. }
  1903. dim {
  1904. size: 500
  1905. }
  1906. }
  1907. }
  1908. }
  1909. }
  1910. }
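# VariableV2 / Assign / Identity(read) is the triplet TF 1.x emits for one tf.Variable:
# the VariableV2 op holds the [784, 500] float32 state, Assign wires in the initializer
# built above, and layer1/weights/read is the dereferenced value consumed by the rest of
# the graph.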
  1911. node {
  1912. name: "layer1/biases/Initializer/Const"
  1913. op: "Const"
  1914. attr {
  1915. key: "_class"
  1916. value {
  1917. list {
  1918. s: "loc:@layer1/biases"
  1919. }
  1920. }
  1921. }
  1922. attr {
  1923. key: "_output_shapes"
  1924. value {
  1925. list {
  1926. shape {
  1927. dim {
  1928. size: 500
  1929. }
  1930. }
  1931. }
  1932. }
  1933. }
  1934. attr {
  1935. key: "dtype"
  1936. value {
  1937. type: DT_FLOAT
  1938. }
  1939. }
  1940. attr {
  1941. key: "value"
  1942. value {
  1943. tensor {
  1944. dtype: DT_FLOAT
  1945. tensor_shape {
  1946. dim {
  1947. size: 500
  1948. }
  1949. }
  1950. float_val: 0.10000000149
  1951. }
  1952. }
  1953. }
  1954. }
  1955. node {
  1956. name: "layer1/biases"
  1957. op: "VariableV2"
  1958. attr {
  1959. key: "_class"
  1960. value {
  1961. list {
  1962. s: "loc:@layer1/biases"
  1963. }
  1964. }
  1965. }
  1966. attr {
  1967. key: "_output_shapes"
  1968. value {
  1969. list {
  1970. shape {
  1971. dim {
  1972. size: 500
  1973. }
  1974. }
  1975. }
  1976. }
  1977. }
  1978. attr {
  1979. key: "container"
  1980. value {
  1981. s: ""
  1982. }
  1983. }
  1984. attr {
  1985. key: "dtype"
  1986. value {
  1987. type: DT_FLOAT
  1988. }
  1989. }
  1990. attr {
  1991. key: "shape"
  1992. value {
  1993. shape {
  1994. dim {
  1995. size: 500
  1996. }
  1997. }
  1998. }
  1999. }
  2000. attr {
  2001. key: "shared_name"
  2002. value {
  2003. s: ""
  2004. }
  2005. }
  2006. }
  2007. node {
  2008. name: "layer1/biases/Assign"
  2009. op: "Assign"
  2010. input: "layer1/biases"
  2011. input: "layer1/biases/Initializer/Const"
  2012. attr {
  2013. key: "T"
  2014. value {
  2015. type: DT_FLOAT
  2016. }
  2017. }
  2018. attr {
  2019. key: "_class"
  2020. value {
  2021. list {
  2022. s: "loc:@layer1/biases"
  2023. }
  2024. }
  2025. }
  2026. attr {
  2027. key: "_output_shapes"
  2028. value {
  2029. list {
  2030. shape {
  2031. dim {
  2032. size: 500
  2033. }
  2034. }
  2035. }
  2036. }
  2037. }
  2038. attr {
  2039. key: "use_locking"
  2040. value {
  2041. b: true
  2042. }
  2043. }
  2044. attr {
  2045. key: "validate_shape"
  2046. value {
  2047. b: true
  2048. }
  2049. }
  2050. }
  2051. node {
  2052. name: "layer1/biases/read"
  2053. op: "Identity"
  2054. input: "layer1/biases"
  2055. attr {
  2056. key: "T"
  2057. value {
  2058. type: DT_FLOAT
  2059. }
  2060. }
  2061. attr {
  2062. key: "_class"
  2063. value {
  2064. list {
  2065. s: "loc:@layer1/biases"
  2066. }
  2067. }
  2068. }
  2069. attr {
  2070. key: "_output_shapes"
  2071. value {
  2072. list {
  2073. shape {
  2074. dim {
  2075. size: 500
  2076. }
  2077. }
  2078. }
  2079. }
  2080. }
  2081. }
  2082. node {
  2083. name: "layer1/MatMul"
  2084. op: "MatMul"
  2085. input: "x-input"
  2086. input: "layer1/weights/read"
  2087. attr {
  2088. key: "T"
  2089. value {
  2090. type: DT_FLOAT
  2091. }
  2092. }
  2093. attr {
  2094. key: "_output_shapes"
  2095. value {
  2096. list {
  2097. shape {
  2098. dim {
  2099. size: -1
  2100. }
  2101. dim {
  2102. size: 500
  2103. }
  2104. }
  2105. }
  2106. }
  2107. }
  2108. attr {
  2109. key: "transpose_a"
  2110. value {
  2111. b: false
  2112. }
  2113. }
  2114. attr {
  2115. key: "transpose_b"
  2116. value {
  2117. b: false
  2118. }
  2119. }
  2120. }
  2121. node {
  2122. name: "layer1/add"
  2123. op: "Add"
  2124. input: "layer1/MatMul"
  2125. input: "layer1/biases/read"
  2126. attr {
  2127. key: "T"
  2128. value {
  2129. type: DT_FLOAT
  2130. }
  2131. }
  2132. attr {
  2133. key: "_output_shapes"
  2134. value {
  2135. list {
  2136. shape {
  2137. dim {
  2138. size: -1
  2139. }
  2140. dim {
  2141. size: 500
  2142. }
  2143. }
  2144. }
  2145. }
  2146. }
  2147. }
  2148. node {
  2149. name: "layer1/Relu"
  2150. op: "Relu"
  2151. input: "layer1/add"
  2152. attr {
  2153. key: "T"
  2154. value {
  2155. type: DT_FLOAT
  2156. }
  2157. }
  2158. attr {
  2159. key: "_output_shapes"
  2160. value {
  2161. list {
  2162. shape {
  2163. dim {
  2164. size: -1
  2165. }
  2166. dim {
  2167. size: 500
  2168. }
  2169. }
  2170. }
  2171. }
  2172. }
  2173. }
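# layer1/MatMul, layer1/add and layer1/Relu compute the hidden layer, output shape [?, 500].
# A minimal sketch of the equivalent TF 1.x code (the Python variable names are assumptions):
#   hidden = tf.nn.relu(tf.matmul(x, layer1_weights) + layer1_biases)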
  2174. node {
  2175. name: "layer2/weights/Initializer/truncated_normal/shape"
  2176. op: "Const"
  2177. attr {
  2178. key: "_class"
  2179. value {
  2180. list {
  2181. s: "loc:@layer2/weights"
  2182. }
  2183. }
  2184. }
  2185. attr {
  2186. key: "_output_shapes"
  2187. value {
  2188. list {
  2189. shape {
  2190. dim {
  2191. size: 2
  2192. }
  2193. }
  2194. }
  2195. }
  2196. }
  2197. attr {
  2198. key: "dtype"
  2199. value {
  2200. type: DT_INT32
  2201. }
  2202. }
  2203. attr {
  2204. key: "value"
  2205. value {
  2206. tensor {
  2207. dtype: DT_INT32
  2208. tensor_shape {
  2209. dim {
  2210. size: 2
  2211. }
  2212. }
  2213. tensor_content: "\364\001\000\000\n\000\000\000"
  2214. }
  2215. }
  2216. }
  2217. }
  2218. node {
  2219. name: "layer2/weights/Initializer/truncated_normal/mean"
  2220. op: "Const"
  2221. attr {
  2222. key: "_class"
  2223. value {
  2224. list {
  2225. s: "loc:@layer2/weights"
  2226. }
  2227. }
  2228. }
  2229. attr {
  2230. key: "_output_shapes"
  2231. value {
  2232. list {
  2233. shape {
  2234. }
  2235. }
  2236. }
  2237. }
  2238. attr {
  2239. key: "dtype"
  2240. value {
  2241. type: DT_FLOAT
  2242. }
  2243. }
  2244. attr {
  2245. key: "value"
  2246. value {
  2247. tensor {
  2248. dtype: DT_FLOAT
  2249. tensor_shape {
  2250. }
  2251. float_val: 0.0
  2252. }
  2253. }
  2254. }
  2255. }
  2256. node {
  2257. name: "layer2/weights/Initializer/truncated_normal/stddev"
  2258. op: "Const"
  2259. attr {
  2260. key: "_class"
  2261. value {
  2262. list {
  2263. s: "loc:@layer2/weights"
  2264. }
  2265. }
  2266. }
  2267. attr {
  2268. key: "_output_shapes"
  2269. value {
  2270. list {
  2271. shape {
  2272. }
  2273. }
  2274. }
  2275. }
  2276. attr {
  2277. key: "dtype"
  2278. value {
  2279. type: DT_FLOAT
  2280. }
  2281. }
  2282. attr {
  2283. key: "value"
  2284. value {
  2285. tensor {
  2286. dtype: DT_FLOAT
  2287. tensor_shape {
  2288. }
  2289. float_val: 0.10000000149
  2290. }
  2291. }
  2292. }
  2293. }
  2294. node {
  2295. name: "layer2/weights/Initializer/truncated_normal/TruncatedNormal"
  2296. op: "TruncatedNormal"
  2297. input: "layer2/weights/Initializer/truncated_normal/shape"
  2298. attr {
  2299. key: "T"
  2300. value {
  2301. type: DT_INT32
  2302. }
  2303. }
  2304. attr {
  2305. key: "_class"
  2306. value {
  2307. list {
  2308. s: "loc:@layer2/weights"
  2309. }
  2310. }
  2311. }
  2312. attr {
  2313. key: "_output_shapes"
  2314. value {
  2315. list {
  2316. shape {
  2317. dim {
  2318. size: 500
  2319. }
  2320. dim {
  2321. size: 10
  2322. }
  2323. }
  2324. }
  2325. }
  2326. }
  2327. attr {
  2328. key: "dtype"
  2329. value {
  2330. type: DT_FLOAT
  2331. }
  2332. }
  2333. attr {
  2334. key: "seed"
  2335. value {
  2336. i: 0
  2337. }
  2338. }
  2339. attr {
  2340. key: "seed2"
  2341. value {
  2342. i: 0
  2343. }
  2344. }
  2345. }
  2346. node {
  2347. name: "layer2/weights/Initializer/truncated_normal/mul"
  2348. op: "Mul"
  2349. input: "layer2/weights/Initializer/truncated_normal/TruncatedNormal"
  2350. input: "layer2/weights/Initializer/truncated_normal/stddev"
  2351. attr {
  2352. key: "T"
  2353. value {
  2354. type: DT_FLOAT
  2355. }
  2356. }
  2357. attr {
  2358. key: "_class"
  2359. value {
  2360. list {
  2361. s: "loc:@layer2/weights"
  2362. }
  2363. }
  2364. }
  2365. attr {
  2366. key: "_output_shapes"
  2367. value {
  2368. list {
  2369. shape {
  2370. dim {
  2371. size: 500
  2372. }
  2373. dim {
  2374. size: 10
  2375. }
  2376. }
  2377. }
  2378. }
  2379. }
  2380. }
  2381. node {
  2382. name: "layer2/weights/Initializer/truncated_normal"
  2383. op: "Add"
  2384. input: "layer2/weights/Initializer/truncated_normal/mul"
  2385. input: "layer2/weights/Initializer/truncated_normal/mean"
  2386. attr {
  2387. key: "T"
  2388. value {
  2389. type: DT_FLOAT
  2390. }
  2391. }
  2392. attr {
  2393. key: "_class"
  2394. value {
  2395. list {
  2396. s: "loc:@layer2/weights"
  2397. }
  2398. }
  2399. }
  2400. attr {
  2401. key: "_output_shapes"
  2402. value {
  2403. list {
  2404. shape {
  2405. dim {
  2406. size: 500
  2407. }
  2408. dim {
  2409. size: 10
  2410. }
  2411. }
  2412. }
  2413. }
  2414. }
  2415. }
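# layer2/weights repeats the same truncated-normal initializer pattern with stddev 0.1 and
# shape [500, 10] (the tensor_content above decodes to the int32 pair [500, 10]).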
  2416. node {
  2417. name: "layer2/weights"
  2418. op: "VariableV2"
  2419. attr {
  2420. key: "_class"
  2421. value {
  2422. list {
  2423. s: "loc:@layer2/weights"
  2424. }
  2425. }
  2426. }
  2427. attr {
  2428. key: "_output_shapes"
  2429. value {
  2430. list {
  2431. shape {
  2432. dim {
  2433. size: 500
  2434. }
  2435. dim {
  2436. size: 10
  2437. }
  2438. }
  2439. }
  2440. }
  2441. }
  2442. attr {
  2443. key: "container"
  2444. value {
  2445. s: ""
  2446. }
  2447. }
  2448. attr {
  2449. key: "dtype"
  2450. value {
  2451. type: DT_FLOAT
  2452. }
  2453. }
  2454. attr {
  2455. key: "shape"
  2456. value {
  2457. shape {
  2458. dim {
  2459. size: 500
  2460. }
  2461. dim {
  2462. size: 10
  2463. }
  2464. }
  2465. }
  2466. }
  2467. attr {
  2468. key: "shared_name"
  2469. value {
  2470. s: ""
  2471. }
  2472. }
  2473. }
  2474. node {
  2475. name: "layer2/weights/Assign"
  2476. op: "Assign"
  2477. input: "layer2/weights"
  2478. input: "layer2/weights/Initializer/truncated_normal"
  2479. attr {
  2480. key: "T"
  2481. value {
  2482. type: DT_FLOAT
  2483. }
  2484. }
  2485. attr {
  2486. key: "_class"
  2487. value {
  2488. list {
  2489. s: "loc:@layer2/weights"
  2490. }
  2491. }
  2492. }
  2493. attr {
  2494. key: "_output_shapes"
  2495. value {
  2496. list {
  2497. shape {
  2498. dim {
  2499. size: 500
  2500. }
  2501. dim {
  2502. size: 10
  2503. }
  2504. }
  2505. }
  2506. }
  2507. }
  2508. attr {
  2509. key: "use_locking"
  2510. value {
  2511. b: true
  2512. }
  2513. }
  2514. attr {
  2515. key: "validate_shape"
  2516. value {
  2517. b: true
  2518. }
  2519. }
  2520. }
  2521. node {
  2522. name: "layer2/weights/read"
  2523. op: "Identity"
  2524. input: "layer2/weights"
  2525. attr {
  2526. key: "T"
  2527. value {
  2528. type: DT_FLOAT
  2529. }
  2530. }
  2531. attr {
  2532. key: "_class"
  2533. value {
  2534. list {
  2535. s: "loc:@layer2/weights"
  2536. }
  2537. }
  2538. }
  2539. attr {
  2540. key: "_output_shapes"
  2541. value {
  2542. list {
  2543. shape {
  2544. dim {
  2545. size: 500
  2546. }
  2547. dim {
  2548. size: 10
  2549. }
  2550. }
  2551. }
  2552. }
  2553. }
  2554. }
  2555. node {
  2556. name: "layer2/biases/Initializer/Const"
  2557. op: "Const"
  2558. attr {
  2559. key: "_class"
  2560. value {
  2561. list {
  2562. s: "loc:@layer2/biases"
  2563. }
  2564. }
  2565. }
  2566. attr {
  2567. key: "_output_shapes"
  2568. value {
  2569. list {
  2570. shape {
  2571. dim {
  2572. size: 10
  2573. }
  2574. }
  2575. }
  2576. }
  2577. }
  2578. attr {
  2579. key: "dtype"
  2580. value {
  2581. type: DT_FLOAT
  2582. }
  2583. }
  2584. attr {
  2585. key: "value"
  2586. value {
  2587. tensor {
  2588. dtype: DT_FLOAT
  2589. tensor_shape {
  2590. dim {
  2591. size: 10
  2592. }
  2593. }
  2594. float_val: 0.10000000149
  2595. }
  2596. }
  2597. }
  2598. }
  2599. node {
  2600. name: "layer2/biases"
  2601. op: "VariableV2"
  2602. attr {
  2603. key: "_class"
  2604. value {
  2605. list {
  2606. s: "loc:@layer2/biases"
  2607. }
  2608. }
  2609. }
  2610. attr {
  2611. key: "_output_shapes"
  2612. value {
  2613. list {
  2614. shape {
  2615. dim {
  2616. size: 10
  2617. }
  2618. }
  2619. }
  2620. }
  2621. }
  2622. attr {
  2623. key: "container"
  2624. value {
  2625. s: ""
  2626. }
  2627. }
  2628. attr {
  2629. key: "dtype"
  2630. value {
  2631. type: DT_FLOAT
  2632. }
  2633. }
  2634. attr {
  2635. key: "shape"
  2636. value {
  2637. shape {
  2638. dim {
  2639. size: 10
  2640. }
  2641. }
  2642. }
  2643. }
  2644. attr {
  2645. key: "shared_name"
  2646. value {
  2647. s: ""
  2648. }
  2649. }
  2650. }
  2651. node {
  2652. name: "layer2/biases/Assign"
  2653. op: "Assign"
  2654. input: "layer2/biases"
  2655. input: "layer2/biases/Initializer/Const"
  2656. attr {
  2657. key: "T"
  2658. value {
  2659. type: DT_FLOAT
  2660. }
  2661. }
  2662. attr {
  2663. key: "_class"
  2664. value {
  2665. list {
  2666. s: "loc:@layer2/biases"
  2667. }
  2668. }
  2669. }
  2670. attr {
  2671. key: "_output_shapes"
  2672. value {
  2673. list {
  2674. shape {
  2675. dim {
  2676. size: 10
  2677. }
  2678. }
  2679. }
  2680. }
  2681. }
  2682. attr {
  2683. key: "use_locking"
  2684. value {
  2685. b: true
  2686. }
  2687. }
  2688. attr {
  2689. key: "validate_shape"
  2690. value {
  2691. b: true
  2692. }
  2693. }
  2694. }
  2695. node {
  2696. name: "layer2/biases/read"
  2697. op: "Identity"
  2698. input: "layer2/biases"
  2699. attr {
  2700. key: "T"
  2701. value {
  2702. type: DT_FLOAT
  2703. }
  2704. }
  2705. attr {
  2706. key: "_class"
  2707. value {
  2708. list {
  2709. s: "loc:@layer2/biases"
  2710. }
  2711. }
  2712. }
  2713. attr {
  2714. key: "_output_shapes"
  2715. value {
  2716. list {
  2717. shape {
  2718. dim {
  2719. size: 10
  2720. }
  2721. }
  2722. }
  2723. }
  2724. }
  2725. }
  2726. node {
  2727. name: "layer2/MatMul"
  2728. op: "MatMul"
  2729. input: "layer1/Relu"
  2730. input: "layer2/weights/read"
  2731. attr {
  2732. key: "T"
  2733. value {
  2734. type: DT_FLOAT
  2735. }
  2736. }
  2737. attr {
  2738. key: "_output_shapes"
  2739. value {
  2740. list {
  2741. shape {
  2742. dim {
  2743. size: -1
  2744. }
  2745. dim {
  2746. size: 10
  2747. }
  2748. }
  2749. }
  2750. }
  2751. }
  2752. attr {
  2753. key: "transpose_a"
  2754. value {
  2755. b: false
  2756. }
  2757. }
  2758. attr {
  2759. key: "transpose_b"
  2760. value {
  2761. b: false
  2762. }
  2763. }
  2764. }
  2765. node {
  2766. name: "layer2/add"
  2767. op: "Add"
  2768. input: "layer2/MatMul"
  2769. input: "layer2/biases/read"
  2770. attr {
  2771. key: "T"
  2772. value {
  2773. type: DT_FLOAT
  2774. }
  2775. }
  2776. attr {
  2777. key: "_output_shapes"
  2778. value {
  2779. list {
  2780. shape {
  2781. dim {
  2782. size: -1
  2783. }
  2784. dim {
  2785. size: 10
  2786. }
  2787. }
  2788. }
  2789. }
  2790. }
  2791. }
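# layer2/MatMul + layer2/add produce the 10-way output logits, shape [?, 10]. No activation
# is applied here; a softmax cross-entropy presumably follows in a part of the graph outside
# this excerpt (the op list above does include SparseSoftmaxCrossEntropyWithLogits).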
  2792. node {
  2793. name: "layer1_1/MatMul"
  2794. op: "MatMul"
  2795. input: "x-input"
  2796. input: "layer1/weights/read"
  2797. attr {
  2798. key: "T"
  2799. value {
  2800. type: DT_FLOAT
  2801. }
  2802. }
  2803. attr {
  2804. key: "_output_shapes"
  2805. value {
  2806. list {
  2807. shape {
  2808. dim {
  2809. size: -1
  2810. }
  2811. dim {
  2812. size: 500
  2813. }
  2814. }
  2815. }
  2816. }
  2817. }
  2818. attr {
  2819. key: "transpose_a"
  2820. value {
  2821. b: false
  2822. }
  2823. }
  2824. attr {
  2825. key: "transpose_b"
  2826. value {
  2827. b: false
  2828. }
  2829. }
  2830. }
  2831. node {
  2832. name: "layer1_1/add"
  2833. op: "Add"
  2834. input: "layer1_1/MatMul"
  2835. input: "layer1/biases/read"
  2836. attr {
  2837. key: "T"
  2838. value {
  2839. type: DT_FLOAT
  2840. }
  2841. }
  2842. attr {
  2843. key: "_output_shapes"
  2844. value {
  2845. list {
  2846. shape {
  2847. dim {
  2848. size: -1
  2849. }
  2850. dim {
  2851. size: 500
  2852. }
  2853. }
  2854. }
  2855. }
  2856. }
  2857. }
  2858. node {
  2859. name: "layer1_1/Relu"
  2860. op: "Relu"
  2861. input: "layer1_1/add"
  2862. attr {
  2863. key: "T"
  2864. value {
  2865. type: DT_FLOAT
  2866. }
  2867. }
  2868. attr {
  2869. key: "_output_shapes"
  2870. value {
  2871. list {
  2872. shape {
  2873. dim {
  2874. size: -1
  2875. }
  2876. dim {
  2877. size: 500
  2878. }
  2879. }
  2880. }
  2881. }
  2882. }
  2883. }
  2884. node {
  2885. name: "layer2_1/MatMul"
  2886. op: "MatMul"
  2887. input: "layer1_1/Relu"
  2888. input: "layer2/weights/read"
  2889. attr {
  2890. key: "T"
  2891. value {
  2892. type: DT_FLOAT
  2893. }
  2894. }
  2895. attr {
  2896. key: "_output_shapes"
  2897. value {
  2898. list {
  2899. shape {
  2900. dim {
  2901. size: -1
  2902. }
  2903. dim {
  2904. size: 10
  2905. }
  2906. }
  2907. }
  2908. }
  2909. }
  2910. attr {
  2911. key: "transpose_a"
  2912. value {
  2913. b: false
  2914. }
  2915. }
  2916. attr {
  2917. key: "transpose_b"
  2918. value {
  2919. b: false
  2920. }
  2921. }
  2922. }
  2923. node {
  2924. name: "layer2_1/add"
  2925. op: "Add"
  2926. input: "layer2_1/MatMul"
  2927. input: "layer2/biases/read"
  2928. attr {
  2929. key: "T"
  2930. value {
  2931. type: DT_FLOAT
  2932. }
  2933. }
  2934. attr {
  2935. key: "_output_shapes"
  2936. value {
  2937. list {
  2938. shape {
  2939. dim {
  2940. size: -1
  2941. }
  2942. dim {
  2943. size: 10
  2944. }
  2945. }
  2946. }
  2947. }
  2948. }
  2949. }
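# layer1_1/* and layer2_1/* are a second copy of the same forward pass, reusing
# layer1/weights/read, layer1/biases/read, layer2/weights/read and layer2/biases/read.
# This looks like a separately constructed inference path (for example, one evaluated
# alongside the moving-average variables created further down); the intent is not recorded
# in the graph itself.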
  2950. node {
  2951. name: "Variable/initial_value"
  2952. op: "Const"
  2953. attr {
  2954. key: "_output_shapes"
  2955. value {
  2956. list {
  2957. shape {
  2958. }
  2959. }
  2960. }
  2961. }
  2962. attr {
  2963. key: "dtype"
  2964. value {
  2965. type: DT_INT32
  2966. }
  2967. }
  2968. attr {
  2969. key: "value"
  2970. value {
  2971. tensor {
  2972. dtype: DT_INT32
  2973. tensor_shape {
  2974. }
  2975. int_val: 0
  2976. }
  2977. }
  2978. }
  2979. }
  2980. node {
  2981. name: "Variable"
  2982. op: "VariableV2"
  2983. attr {
  2984. key: "_output_shapes"
  2985. value {
  2986. list {
  2987. shape {
  2988. }
  2989. }
  2990. }
  2991. }
  2992. attr {
  2993. key: "container"
  2994. value {
  2995. s: ""
  2996. }
  2997. }
  2998. attr {
  2999. key: "dtype"
  3000. value {
  3001. type: DT_INT32
  3002. }
  3003. }
  3004. attr {
  3005. key: "shape"
  3006. value {
  3007. shape {
  3008. }
  3009. }
  3010. }
  3011. attr {
  3012. key: "shared_name"
  3013. value {
  3014. s: ""
  3015. }
  3016. }
  3017. }
  3018. node {
  3019. name: "Variable/Assign"
  3020. op: "Assign"
  3021. input: "Variable"
  3022. input: "Variable/initial_value"
  3023. attr {
  3024. key: "T"
  3025. value {
  3026. type: DT_INT32
  3027. }
  3028. }
  3029. attr {
  3030. key: "_class"
  3031. value {
  3032. list {
  3033. s: "loc:@Variable"
  3034. }
  3035. }
  3036. }
  3037. attr {
  3038. key: "_output_shapes"
  3039. value {
  3040. list {
  3041. shape {
  3042. }
  3043. }
  3044. }
  3045. }
  3046. attr {
  3047. key: "use_locking"
  3048. value {
  3049. b: true
  3050. }
  3051. }
  3052. attr {
  3053. key: "validate_shape"
  3054. value {
  3055. b: true
  3056. }
  3057. }
  3058. }
  3059. node {
  3060. name: "Variable/read"
  3061. op: "Identity"
  3062. input: "Variable"
  3063. attr {
  3064. key: "T"
  3065. value {
  3066. type: DT_INT32
  3067. }
  3068. }
  3069. attr {
  3070. key: "_class"
  3071. value {
  3072. list {
  3073. s: "loc:@Variable"
  3074. }
  3075. }
  3076. }
  3077. attr {
  3078. key: "_output_shapes"
  3079. value {
  3080. list {
  3081. shape {
  3082. }
  3083. }
  3084. }
  3085. }
  3086. }
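# "Variable" is an unnamed scalar int32 variable initialized to 0. This is consistent with
# a training-step counter (for example the num_updates argument of
# tf.train.ExponentialMovingAverage), but that role is an inference from context, not
# something recorded here.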
  3087. node {
  3088. name: "IsVariableInitialized"
  3089. op: "IsVariableInitialized"
  3090. input: "layer1/weights"
  3091. attr {
  3092. key: "_class"
  3093. value {
  3094. list {
  3095. s: "loc:@layer1/weights"
  3096. }
  3097. }
  3098. }
  3099. attr {
  3100. key: "_output_shapes"
  3101. value {
  3102. list {
  3103. shape {
  3104. }
  3105. }
  3106. }
  3107. }
  3108. attr {
  3109. key: "dtype"
  3110. value {
  3111. type: DT_FLOAT
  3112. }
  3113. }
  3114. }
  3115. node {
  3116. name: "cond/Switch"
  3117. op: "Switch"
  3118. input: "IsVariableInitialized"
  3119. input: "IsVariableInitialized"
  3120. attr {
  3121. key: "T"
  3122. value {
  3123. type: DT_BOOL
  3124. }
  3125. }
  3126. attr {
  3127. key: "_output_shapes"
  3128. value {
  3129. list {
  3130. shape {
  3131. }
  3132. shape {
  3133. }
  3134. }
  3135. }
  3136. }
  3137. }
  3138. node {
  3139. name: "cond/switch_t"
  3140. op: "Identity"
  3141. input: "cond/Switch:1"
  3142. attr {
  3143. key: "T"
  3144. value {
  3145. type: DT_BOOL
  3146. }
  3147. }
  3148. attr {
  3149. key: "_output_shapes"
  3150. value {
  3151. list {
  3152. shape {
  3153. }
  3154. }
  3155. }
  3156. }
  3157. }
  3158. node {
  3159. name: "cond/switch_f"
  3160. op: "Identity"
  3161. input: "cond/Switch"
  3162. attr {
  3163. key: "T"
  3164. value {
  3165. type: DT_BOOL
  3166. }
  3167. }
  3168. attr {
  3169. key: "_output_shapes"
  3170. value {
  3171. list {
  3172. shape {
  3173. }
  3174. }
  3175. }
  3176. }
  3177. }
  3178. node {
  3179. name: "cond/pred_id"
  3180. op: "Identity"
  3181. input: "IsVariableInitialized"
  3182. attr {
  3183. key: "T"
  3184. value {
  3185. type: DT_BOOL
  3186. }
  3187. }
  3188. attr {
  3189. key: "_output_shapes"
  3190. value {
  3191. list {
  3192. shape {
  3193. }
  3194. }
  3195. }
  3196. }
  3197. }
  3198. node {
  3199. name: "cond/read"
  3200. op: "Identity"
  3201. input: "cond/read/Switch:1"
  3202. attr {
  3203. key: "T"
  3204. value {
  3205. type: DT_FLOAT
  3206. }
  3207. }
  3208. attr {
  3209. key: "_output_shapes"
  3210. value {
  3211. list {
  3212. shape {
  3213. dim {
  3214. size: 784
  3215. }
  3216. dim {
  3217. size: 500
  3218. }
  3219. }
  3220. }
  3221. }
  3222. }
  3223. }
  3224. node {
  3225. name: "cond/read/Switch"
  3226. op: "RefSwitch"
  3227. input: "layer1/weights"
  3228. input: "cond/pred_id"
  3229. attr {
  3230. key: "T"
  3231. value {
  3232. type: DT_FLOAT
  3233. }
  3234. }
  3235. attr {
  3236. key: "_class"
  3237. value {
  3238. list {
  3239. s: "loc:@layer1/weights"
  3240. }
  3241. }
  3242. }
  3243. attr {
  3244. key: "_output_shapes"
  3245. value {
  3246. list {
  3247. shape {
  3248. dim {
  3249. size: 784
  3250. }
  3251. dim {
  3252. size: 500
  3253. }
  3254. }
  3255. shape {
  3256. dim {
  3257. size: 784
  3258. }
  3259. dim {
  3260. size: 500
  3261. }
  3262. }
  3263. }
  3264. }
  3265. }
  3266. }
  3267. node {
  3268. name: "cond/Switch_1"
  3269. op: "Switch"
  3270. input: "layer1/weights/Initializer/truncated_normal"
  3271. input: "cond/pred_id"
  3272. attr {
  3273. key: "T"
  3274. value {
  3275. type: DT_FLOAT
  3276. }
  3277. }
  3278. attr {
  3279. key: "_class"
  3280. value {
  3281. list {
  3282. s: "loc:@layer1/weights"
  3283. }
  3284. }
  3285. }
  3286. attr {
  3287. key: "_output_shapes"
  3288. value {
  3289. list {
  3290. shape {
  3291. dim {
  3292. size: 784
  3293. }
  3294. dim {
  3295. size: 500
  3296. }
  3297. }
  3298. shape {
  3299. dim {
  3300. size: 784
  3301. }
  3302. dim {
  3303. size: 500
  3304. }
  3305. }
  3306. }
  3307. }
  3308. }
  3309. }
  3310. node {
  3311. name: "cond/Merge"
  3312. op: "Merge"
  3313. input: "cond/Switch_1"
  3314. input: "cond/read"
  3315. attr {
  3316. key: "N"
  3317. value {
  3318. i: 2
  3319. }
  3320. }
  3321. attr {
  3322. key: "T"
  3323. value {
  3324. type: DT_FLOAT
  3325. }
  3326. }
  3327. attr {
  3328. key: "_output_shapes"
  3329. value {
  3330. list {
  3331. shape {
  3332. dim {
  3333. size: 784
  3334. }
  3335. dim {
  3336. size: 500
  3337. }
  3338. }
  3339. shape {
  3340. }
  3341. }
  3342. }
  3343. }
  3344. }
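# IsVariableInitialized -> Switch/RefSwitch -> Identity -> Merge is the TF 1.x
# initialized_value() pattern: cond/Merge yields the current value of layer1/weights if the
# variable is already initialized, and its initializer output otherwise. Roughly:
#   safe_w = layer1_weights.initialized_value()
# (the Python name is an assumption). The nested cond groups below reuse this pattern to
# pick an initial value for the ExponentialMovingAverage shadow variable.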
  3345. node {
  3346. name: "layer1/weights/ExponentialMovingAverage"
  3347. op: "VariableV2"
  3348. attr {
  3349. key: "_class"
  3350. value {
  3351. list {
  3352. s: "loc:@layer1/weights"
  3353. }
  3354. }
  3355. }
  3356. attr {
  3357. key: "_output_shapes"
  3358. value {
  3359. list {
  3360. shape {
  3361. dim {
  3362. size: 784
  3363. }
  3364. dim {
  3365. size: 500
  3366. }
  3367. }
  3368. }
  3369. }
  3370. }
  3371. attr {
  3372. key: "container"
  3373. value {
  3374. s: ""
  3375. }
  3376. }
  3377. attr {
  3378. key: "dtype"
  3379. value {
  3380. type: DT_FLOAT
  3381. }
  3382. }
  3383. attr {
  3384. key: "shape"
  3385. value {
  3386. shape {
  3387. dim {
  3388. size: 784
  3389. }
  3390. dim {
  3391. size: 500
  3392. }
  3393. }
  3394. }
  3395. }
  3396. attr {
  3397. key: "shared_name"
  3398. value {
  3399. s: ""
  3400. }
  3401. }
  3402. }
  3403. node {
  3404. name: "layer1/weights/ExponentialMovingAverage/IsVariableInitialized"
  3405. op: "IsVariableInitialized"
  3406. input: "layer1/weights"
  3407. attr {
  3408. key: "_class"
  3409. value {
  3410. list {
  3411. s: "loc:@layer1/weights"
  3412. }
  3413. }
  3414. }
  3415. attr {
  3416. key: "_output_shapes"
  3417. value {
  3418. list {
  3419. shape {
  3420. }
  3421. }
  3422. }
  3423. }
  3424. attr {
  3425. key: "dtype"
  3426. value {
  3427. type: DT_FLOAT
  3428. }
  3429. }
  3430. }
  3431. node {
  3432. name: "layer1/weights/ExponentialMovingAverage/cond/Switch"
  3433. op: "Switch"
  3434. input: "layer1/weights/ExponentialMovingAverage/IsVariableInitialized"
  3435. input: "layer1/weights/ExponentialMovingAverage/IsVariableInitialized"
  3436. attr {
  3437. key: "T"
  3438. value {
  3439. type: DT_BOOL
  3440. }
  3441. }
  3442. attr {
  3443. key: "_class"
  3444. value {
  3445. list {
  3446. s: "loc:@layer1/weights"
  3447. }
  3448. }
  3449. }
  3450. attr {
  3451. key: "_output_shapes"
  3452. value {
  3453. list {
  3454. shape {
  3455. }
  3456. shape {
  3457. }
  3458. }
  3459. }
  3460. }
  3461. }
  3462. node {
  3463. name: "layer1/weights/ExponentialMovingAverage/cond/switch_t"
  3464. op: "Identity"
  3465. input: "layer1/weights/ExponentialMovingAverage/cond/Switch:1"
  3466. attr {
  3467. key: "T"
  3468. value {
  3469. type: DT_BOOL
  3470. }
  3471. }
  3472. attr {
  3473. key: "_class"
  3474. value {
  3475. list {
  3476. s: "loc:@layer1/weights"
  3477. }
  3478. }
  3479. }
  3480. attr {
  3481. key: "_output_shapes"
  3482. value {
  3483. list {
  3484. shape {
  3485. }
  3486. }
  3487. }
  3488. }
  3489. }
  3490. node {
  3491. name: "layer1/weights/ExponentialMovingAverage/cond/switch_f"
  3492. op: "Identity"
  3493. input: "layer1/weights/ExponentialMovingAverage/cond/Switch"
  3494. attr {
  3495. key: "T"
  3496. value {
  3497. type: DT_BOOL
  3498. }
  3499. }
  3500. attr {
  3501. key: "_class"
  3502. value {
  3503. list {
  3504. s: "loc:@layer1/weights"
  3505. }
  3506. }
  3507. }
  3508. attr {
  3509. key: "_output_shapes"
  3510. value {
  3511. list {
  3512. shape {
  3513. }
  3514. }
  3515. }
  3516. }
  3517. }
  3518. node {
  3519. name: "layer1/weights/ExponentialMovingAverage/cond/pred_id"
  3520. op: "Identity"
  3521. input: "layer1/weights/ExponentialMovingAverage/IsVariableInitialized"
  3522. attr {
  3523. key: "T"
  3524. value {
  3525. type: DT_BOOL
  3526. }
  3527. }
  3528. attr {
  3529. key: "_class"
  3530. value {
  3531. list {
  3532. s: "loc:@layer1/weights"
  3533. }
  3534. }
  3535. }
  3536. attr {
  3537. key: "_output_shapes"
  3538. value {
  3539. list {
  3540. shape {
  3541. }
  3542. }
  3543. }
  3544. }
  3545. }
  3546. node {
  3547. name: "layer1/weights/ExponentialMovingAverage/cond/read"
  3548. op: "Identity"
  3549. input: "layer1/weights/ExponentialMovingAverage/cond/read/Switch:1"
  3550. attr {
  3551. key: "T"
  3552. value {
  3553. type: DT_FLOAT
  3554. }
  3555. }
  3556. attr {
  3557. key: "_class"
  3558. value {
  3559. list {
  3560. s: "loc:@layer1/weights"
  3561. }
  3562. }
  3563. }
  3564. attr {
  3565. key: "_output_shapes"
  3566. value {
  3567. list {
  3568. shape {
  3569. dim {
  3570. size: 784
  3571. }
  3572. dim {
  3573. size: 500
  3574. }
  3575. }
  3576. }
  3577. }
  3578. }
  3579. }
  3580. node {
  3581. name: "layer1/weights/ExponentialMovingAverage/cond/read/Switch"
  3582. op: "RefSwitch"
  3583. input: "layer1/weights"
  3584. input: "layer1/weights/ExponentialMovingAverage/cond/pred_id"
  3585. attr {
  3586. key: "T"
  3587. value {
  3588. type: DT_FLOAT
  3589. }
  3590. }
  3591. attr {
  3592. key: "_class"
  3593. value {
  3594. list {
  3595. s: "loc:@layer1/weights"
  3596. }
  3597. }
  3598. }
  3599. attr {
  3600. key: "_output_shapes"
  3601. value {
  3602. list {
  3603. shape {
  3604. dim {
  3605. size: 784
  3606. }
  3607. dim {
  3608. size: 500
  3609. }
  3610. }
  3611. shape {
  3612. dim {
  3613. size: 784
  3614. }
  3615. dim {
  3616. size: 500
  3617. }
  3618. }
  3619. }
  3620. }
  3621. }
  3622. }
  3623. node {
  3624. name: "layer1/weights/ExponentialMovingAverage/cond/Switch_1"
  3625. op: "Switch"
  3626. input: "layer1/weights/Initializer/truncated_normal"
  3627. input: "layer1/weights/ExponentialMovingAverage/cond/pred_id"
  3628. attr {
  3629. key: "T"
  3630. value {
  3631. type: DT_FLOAT
  3632. }
  3633. }
  3634. attr {
  3635. key: "_class"
  3636. value {
  3637. list {
  3638. s: "loc:@layer1/weights"
  3639. }
  3640. }
  3641. }
  3642. attr {
  3643. key: "_output_shapes"
  3644. value {
  3645. list {
  3646. shape {
  3647. dim {
  3648. size: 784
  3649. }
  3650. dim {
  3651. size: 500
  3652. }
  3653. }
  3654. shape {
  3655. dim {
  3656. size: 784
  3657. }
  3658. dim {
  3659. size: 500
  3660. }
  3661. }
  3662. }
  3663. }
  3664. }
  3665. }
  3666. node {
  3667. name: "layer1/weights/ExponentialMovingAverage/cond/Merge"
  3668. op: "Merge"
  3669. input: "layer1/weights/ExponentialMovingAverage/cond/Switch_1"
  3670. input: "layer1/weights/ExponentialMovingAverage/cond/read"
  3671. attr {
  3672. key: "N"
  3673. value {
  3674. i: 2
  3675. }
  3676. }
  3677. attr {
  3678. key: "T"
  3679. value {
  3680. type: DT_FLOAT
  3681. }
  3682. }
  3683. attr {
  3684. key: "_class"
  3685. value {
  3686. list {
  3687. s: "loc:@layer1/weights"
  3688. }
  3689. }
  3690. }
  3691. attr {
  3692. key: "_output_shapes"
  3693. value {
  3694. list {
  3695. shape {
  3696. dim {
  3697. size: 784
  3698. }
  3699. dim {
  3700. size: 500
  3701. }
  3702. }
  3703. shape {
  3704. }
  3705. }
  3706. }
  3707. }
  3708. }
  3709. node {
  3710. name: "layer1/weights/ExponentialMovingAverage/cond/read/Switch_layer1/weights/ExponentialMovingAverage_0"
  3711. op: "Switch"
  3712. input: "layer1/weights/ExponentialMovingAverage/cond/Merge"
  3713. input: "cond/pred_id"
  3714. attr {
  3715. key: "T"
  3716. value {
  3717. type: DT_FLOAT
  3718. }
  3719. }
  3720. attr {
  3721. key: "_class"
  3722. value {
  3723. list {
  3724. s: "loc:@layer1/weights"
  3725. }
  3726. }
  3727. }
  3728. attr {
  3729. key: "_output_shapes"
  3730. value {
  3731. list {
  3732. shape {
  3733. dim {
  3734. size: 784
  3735. }
  3736. dim {
  3737. size: 500
  3738. }
  3739. }
  3740. shape {
  3741. dim {
  3742. size: 784
  3743. }
  3744. dim {
  3745. size: 500
  3746. }
  3747. }
  3748. }
  3749. }
  3750. }
  3751. }
  3752. node {
  3753. name: "layer1/weights/ExponentialMovingAverage/cond/read_layer1/weights/ExponentialMovingAverage_0"
  3754. op: "Identity"
  3755. input: "layer1/weights/ExponentialMovingAverage/cond/read/Switch_layer1/weights/ExponentialMovingAverage_0:1"
  3756. attr {
  3757. key: "T"
  3758. value {
  3759. type: DT_FLOAT
  3760. }
  3761. }
  3762. attr {
  3763. key: "_class"
  3764. value {
  3765. list {
  3766. s: "loc:@layer1/weights"
  3767. }
  3768. }
  3769. }
  3770. attr {
  3771. key: "_output_shapes"
  3772. value {
  3773. list {
  3774. shape {
  3775. dim {
  3776. size: 784
  3777. }
  3778. dim {
  3779. size: 500
  3780. }
  3781. }
  3782. }
  3783. }
  3784. }
  3785. }
  3786. node {
  3787. name: "layer1/weights/ExponentialMovingAverage/cond/Merge_layer1/weights/ExponentialMovingAverage_0"
  3788. op: "Merge"
  3789. input: "cond/Switch_1"
  3790. input: "layer1/weights/ExponentialMovingAverage/cond/read_layer1/weights/ExponentialMovingAverage_0"
  3791. attr {
  3792. key: "N"
  3793. value {
  3794. i: 2
  3795. }
  3796. }
  3797. attr {
  3798. key: "T"
  3799. value {
  3800. type: DT_FLOAT
  3801. }
  3802. }
  3803. attr {
  3804. key: "_class"
  3805. value {
  3806. list {
  3807. s: "loc:@layer1/weights"
  3808. }
  3809. }
  3810. }
  3811. attr {
  3812. key: "_output_shapes"
  3813. value {
  3814. list {
  3815. shape {
  3816. dim {
  3817. size: 784
  3818. }
  3819. dim {
  3820. size: 500
  3821. }
  3822. }
  3823. shape {
  3824. }
  3825. }
  3826. }
  3827. }
  3828. }
  3829. node {
  3830. name: "layer1/weights/ExponentialMovingAverage/Assign"
  3831. op: "Assign"
  3832. input: "layer1/weights/ExponentialMovingAverage"
  3833. input: "layer1/weights/ExponentialMovingAverage/cond/Merge_layer1/weights/ExponentialMovingAverage_0"
  3834. attr {
  3835. key: "T"
  3836. value {
  3837. type: DT_FLOAT
  3838. }
  3839. }
  3840. attr {
  3841. key: "_class"
  3842. value {
  3843. list {
  3844. s: "loc:@layer1/weights"
  3845. }
  3846. }
  3847. }
  3848. attr {
  3849. key: "_output_shapes"
  3850. value {
  3851. list {
  3852. shape {
  3853. dim {
  3854. size: 784
  3855. }
  3856. dim {
  3857. size: 500
  3858. }
  3859. }
  3860. }
  3861. }
  3862. }
  3863. attr {
  3864. key: "use_locking"
  3865. value {
  3866. b: true
  3867. }
  3868. }
  3869. attr {
  3870. key: "validate_shape"
  3871. value {
  3872. b: true
  3873. }
  3874. }
  3875. }
  3876. node {
  3877. name: "layer1/weights/ExponentialMovingAverage/read"
  3878. op: "Identity"
  3879. input: "layer1/weights/ExponentialMovingAverage"
  3880. attr {
  3881. key: "T"
  3882. value {
  3883. type: DT_FLOAT
  3884. }
  3885. }
  3886. attr {
  3887. key: "_class"
  3888. value {
  3889. list {
  3890. s: "loc:@layer1/weights"
  3891. }
  3892. }
  3893. }
  3894. attr {
  3895. key: "_output_shapes"
  3896. value {
  3897. list {
  3898. shape {
  3899. dim {
  3900. size: 784
  3901. }
  3902. dim {
  3903. size: 500
  3904. }
  3905. }
  3906. }
  3907. }
  3908. }
  3909. }
  3910. node {
  3911. name: "IsVariableInitialized_1"
  3912. op: "IsVariableInitialized"
  3913. input: "layer1/biases"
  3914. attr {
  3915. key: "_class"
  3916. value {
  3917. list {
  3918. s: "loc:@layer1/biases"
  3919. }
  3920. }
  3921. }
  3922. attr {
  3923. key: "_output_shapes"
  3924. value {
  3925. list {
  3926. shape {
  3927. }
  3928. }
  3929. }
  3930. }
  3931. attr {
  3932. key: "dtype"
  3933. value {
  3934. type: DT_FLOAT
  3935. }
  3936. }
  3937. }
  3938. node {
  3939. name: "cond_1/Switch"
  3940. op: "Switch"
  3941. input: "IsVariableInitialized_1"
  3942. input: "IsVariableInitialized_1"
  3943. attr {
  3944. key: "T"
  3945. value {
  3946. type: DT_BOOL
  3947. }
  3948. }
  3949. attr {
  3950. key: "_output_shapes"
  3951. value {
  3952. list {
  3953. shape {
  3954. }
  3955. shape {
  3956. }
  3957. }
  3958. }
  3959. }
  3960. }
  3961. node {
  3962. name: "cond_1/switch_t"
  3963. op: "Identity"
  3964. input: "cond_1/Switch:1"
  3965. attr {
  3966. key: "T"
  3967. value {
  3968. type: DT_BOOL
  3969. }
  3970. }
  3971. attr {
  3972. key: "_output_shapes"
  3973. value {
  3974. list {
  3975. shape {
  3976. }
  3977. }
  3978. }
  3979. }
  3980. }
  3981. node {
  3982. name: "cond_1/switch_f"
  3983. op: "Identity"
  3984. input: "cond_1/Switch"
  3985. attr {
  3986. key: "T"
  3987. value {
  3988. type: DT_BOOL
  3989. }
  3990. }
  3991. attr {
  3992. key: "_output_shapes"
  3993. value {
  3994. list {
  3995. shape {
  3996. }
  3997. }
  3998. }
  3999. }
  4000. }
  4001. node {
  4002. name: "cond_1/pred_id"
  4003. op: "Identity"
  4004. input: "IsVariableInitialized_1"
  4005. attr {
  4006. key: "T"
  4007. value {
  4008. type: DT_BOOL
  4009. }
  4010. }
  4011. attr {
  4012. key: "_output_shapes"
  4013. value {
  4014. list {
  4015. shape {
  4016. }
  4017. }
  4018. }
  4019. }
  4020. }
  4021. node {
  4022. name: "cond_1/read"
  4023. op: "Identity"
  4024. input: "cond_1/read/Switch:1"
  4025. attr {
  4026. key: "T"
  4027. value {
  4028. type: DT_FLOAT
  4029. }
  4030. }
  4031. attr {
  4032. key: "_output_shapes"
  4033. value {
  4034. list {
  4035. shape {
  4036. dim {
  4037. size: 500
  4038. }
  4039. }
  4040. }
  4041. }
  4042. }
  4043. }
  4044. node {
  4045. name: "cond_1/read/Switch"
  4046. op: "RefSwitch"
  4047. input: "layer1/biases"
  4048. input: "cond_1/pred_id"
  4049. attr {
  4050. key: "T"
  4051. value {
  4052. type: DT_FLOAT
  4053. }
  4054. }
  4055. attr {
  4056. key: "_class"
  4057. value {
  4058. list {
  4059. s: "loc:@layer1/biases"
  4060. }
  4061. }
  4062. }
  4063. attr {
  4064. key: "_output_shapes"
  4065. value {
  4066. list {
  4067. shape {
  4068. dim {
  4069. size: 500
  4070. }
  4071. }
  4072. shape {
  4073. dim {
  4074. size: 500
  4075. }
  4076. }
  4077. }
  4078. }
  4079. }
  4080. }
  4081. node {
  4082. name: "cond_1/Switch_1"
  4083. op: "Switch"
  4084. input: "layer1/biases/Initializer/Const"
  4085. input: "cond_1/pred_id"
  4086. attr {
  4087. key: "T"
  4088. value {
  4089. type: DT_FLOAT
  4090. }
  4091. }
  4092. attr {
  4093. key: "_class"
  4094. value {
  4095. list {
  4096. s: "loc:@layer1/biases"
  4097. }
  4098. }
  4099. }
  4100. attr {
  4101. key: "_output_shapes"
  4102. value {
  4103. list {
  4104. shape {
  4105. dim {
  4106. size: 500
  4107. }
  4108. }
  4109. shape {
  4110. dim {
  4111. size: 500
  4112. }
  4113. }
  4114. }
  4115. }
  4116. }
  4117. }
  4118. node {
  4119. name: "cond_1/Merge"
  4120. op: "Merge"
  4121. input: "cond_1/Switch_1"
  4122. input: "cond_1/read"
  4123. attr {
  4124. key: "N"
  4125. value {
  4126. i: 2
  4127. }
  4128. }
  4129. attr {
  4130. key: "T"
  4131. value {
  4132. type: DT_FLOAT
  4133. }
  4134. }
  4135. attr {
  4136. key: "_output_shapes"
  4137. value {
  4138. list {
  4139. shape {
  4140. dim {
  4141. size: 500
  4142. }
  4143. }
  4144. shape {
  4145. }
  4146. }
  4147. }
  4148. }
  4149. }
  4150. node {
  4151. name: "layer1/biases/ExponentialMovingAverage"
  4152. op: "VariableV2"
  4153. attr {
  4154. key: "_class"
  4155. value {
  4156. list {
  4157. s: "loc:@layer1/biases"
  4158. }
  4159. }
  4160. }
  4161. attr {
  4162. key: "_output_shapes"
  4163. value {
  4164. list {
  4165. shape {
  4166. dim {
  4167. size: 500
  4168. }
  4169. }
  4170. }
  4171. }
  4172. }
  4173. attr {
  4174. key: "container"
  4175. value {
  4176. s: ""
  4177. }
  4178. }
  4179. attr {
  4180. key: "dtype"
  4181. value {
  4182. type: DT_FLOAT
  4183. }
  4184. }
  4185. attr {
  4186. key: "shape"
  4187. value {
  4188. shape {
  4189. dim {
  4190. size: 500
  4191. }
  4192. }
  4193. }
  4194. }
  4195. attr {
  4196. key: "shared_name"
  4197. value {
  4198. s: ""
  4199. }
  4200. }
  4201. }
  4202. node {
  4203. name: "layer1/biases/ExponentialMovingAverage/IsVariableInitialized"
  4204. op: "IsVariableInitialized"
  4205. input: "layer1/biases"
  4206. attr {
  4207. key: "_class"
  4208. value {
  4209. list {
  4210. s: "loc:@layer1/biases"
  4211. }
  4212. }
  4213. }
  4214. attr {
  4215. key: "_output_shapes"
  4216. value {
  4217. list {
  4218. shape {
  4219. }
  4220. }
  4221. }
  4222. }
  4223. attr {
  4224. key: "dtype"
  4225. value {
  4226. type: DT_FLOAT
  4227. }
  4228. }
  4229. }
  4230. node {
  4231. name: "layer1/biases/ExponentialMovingAverage/cond/Switch"
  4232. op: "Switch"
  4233. input: "layer1/biases/ExponentialMovingAverage/IsVariableInitialized"
  4234. input: "layer1/biases/ExponentialMovingAverage/IsVariableInitialized"
  4235. attr {
  4236. key: "T"
  4237. value {
  4238. type: DT_BOOL
  4239. }
  4240. }
  4241. attr {
  4242. key: "_class"
  4243. value {
  4244. list {
  4245. s: "loc:@layer1/biases"
  4246. }
  4247. }
  4248. }
  4249. attr {
  4250. key: "_output_shapes"
  4251. value {
  4252. list {
  4253. shape {
  4254. }
  4255. shape {
  4256. }
  4257. }
  4258. }
  4259. }
  4260. }
  4261. node {
  4262. name: "layer1/biases/ExponentialMovingAverage/cond/switch_t"
  4263. op: "Identity"
  4264. input: "layer1/biases/ExponentialMovingAverage/cond/Switch:1"
  4265. attr {
  4266. key: "T"
  4267. value {
  4268. type: DT_BOOL
  4269. }
  4270. }
  4271. attr {
  4272. key: "_class"
  4273. value {
  4274. list {
  4275. s: "loc:@layer1/biases"
  4276. }
  4277. }
  4278. }
  4279. attr {
  4280. key: "_output_shapes"
  4281. value {
  4282. list {
  4283. shape {
  4284. }
  4285. }
  4286. }
  4287. }
  4288. }
  4289. node {
  4290. name: "layer1/biases/ExponentialMovingAverage/cond/switch_f"
  4291. op: "Identity"
  4292. input: "layer1/biases/ExponentialMovingAverage/cond/Switch"
  4293. attr {
  4294. key: "T"
  4295. value {
  4296. type: DT_BOOL
  4297. }
  4298. }
  4299. attr {
  4300. key: "_class"
  4301. value {
  4302. list {
  4303. s: "loc:@layer1/biases"
  4304. }
  4305. }
  4306. }
  4307. attr {
  4308. key: "_output_shapes"
  4309. value {
  4310. list {
  4311. shape {
  4312. }
  4313. }
  4314. }
  4315. }
  4316. }
  4317. node {
  4318. name: "layer1/biases/ExponentialMovingAverage/cond/pred_id"
  4319. op: "Identity"
  4320. input: "layer1/biases/ExponentialMovingAverage/IsVariableInitialized"
  4321. attr {
  4322. key: "T"
  4323. value {
  4324. type: DT_BOOL
  4325. }
  4326. }
  4327. attr {
  4328. key: "_class"
  4329. value {
  4330. list {
  4331. s: "loc:@layer1/biases"
  4332. }
  4333. }
  4334. }
  4335. attr {
  4336. key: "_output_shapes"
  4337. value {
  4338. list {
  4339. shape {
  4340. }
  4341. }
  4342. }
  4343. }
  4344. }
  4345. node {
  4346. name: "layer1/biases/ExponentialMovingAverage/cond/read"
  4347. op: "Identity"
  4348. input: "layer1/biases/ExponentialMovingAverage/cond/read/Switch:1"
  4349. attr {
  4350. key: "T"
  4351. value {
  4352. type: DT_FLOAT
  4353. }
  4354. }
  4355. attr {
  4356. key: "_class"
  4357. value {
  4358. list {
  4359. s: "loc:@layer1/biases"
  4360. }
  4361. }
  4362. }
  4363. attr {
  4364. key: "_output_shapes"
  4365. value {
  4366. list {
  4367. shape {
  4368. dim {
  4369. size: 500
  4370. }
  4371. }
  4372. }
  4373. }
  4374. }
  4375. }
  4376. node {
  4377. name: "layer1/biases/ExponentialMovingAverage/cond/read/Switch"
  4378. op: "RefSwitch"
  4379. input: "layer1/biases"
  4380. input: "layer1/biases/ExponentialMovingAverage/cond/pred_id"
  4381. attr {
  4382. key: "T"
  4383. value {
  4384. type: DT_FLOAT
  4385. }
  4386. }
  4387. attr {
  4388. key: "_class"
  4389. value {
  4390. list {
  4391. s: "loc:@layer1/biases"
  4392. }
  4393. }
  4394. }
  4395. attr {
  4396. key: "_output_shapes"
  4397. value {
  4398. list {
  4399. shape {
  4400. dim {
  4401. size: 500
  4402. }
  4403. }
  4404. shape {
  4405. dim {
  4406. size: 500
  4407. }
  4408. }
  4409. }
  4410. }
  4411. }
  4412. }
  4413. node {
  4414. name: "layer1/biases/ExponentialMovingAverage/cond/Switch_1"
  4415. op: "Switch"
  4416. input: "layer1/biases/Initializer/Const"
  4417. input: "layer1/biases/ExponentialMovingAverage/cond/pred_id"
  4418. attr {
  4419. key: "T"
  4420. value {
  4421. type: DT_FLOAT
  4422. }
  4423. }
  4424. attr {
  4425. key: "_class"
  4426. value {
  4427. list {
  4428. s: "loc:@layer1/biases"
  4429. }
  4430. }
  4431. }
  4432. attr {
  4433. key: "_output_shapes"
  4434. value {
  4435. list {
  4436. shape {
  4437. dim {
  4438. size: 500
  4439. }
  4440. }
  4441. shape {
  4442. dim {
  4443. size: 500
  4444. }
  4445. }
  4446. }
  4447. }
  4448. }
  4449. }
  4450. node {
  4451. name: "layer1/biases/ExponentialMovingAverage/cond/Merge"
  4452. op: "Merge"
  4453. input: "layer1/biases/ExponentialMovingAverage/cond/Switch_1"
  4454. input: "layer1/biases/ExponentialMovingAverage/cond/read"
  4455. attr {
  4456. key: "N"
  4457. value {
  4458. i: 2
  4459. }
  4460. }
  4461. attr {
  4462. key: "T"
  4463. value {
  4464. type: DT_FLOAT
  4465. }
  4466. }
  4467. attr {
  4468. key: "_class"
  4469. value {
  4470. list {
  4471. s: "loc:@layer1/biases"
  4472. }
  4473. }
  4474. }
  4475. attr {
  4476. key: "_output_shapes"
  4477. value {
  4478. list {
  4479. shape {
  4480. dim {
  4481. size: 500
  4482. }
  4483. }
  4484. shape {
  4485. }
  4486. }
  4487. }
  4488. }
  4489. }
  4490. node {
  4491. name: "layer1/biases/ExponentialMovingAverage/cond_1/read/Switch_layer1/biases/ExponentialMovingAverage_0"
  4492. op: "Switch"
  4493. input: "layer1/biases/ExponentialMovingAverage/cond/Merge"
  4494. input: "cond_1/pred_id"
  4495. attr {
  4496. key: "T"
  4497. value {
  4498. type: DT_FLOAT
  4499. }
  4500. }
  4501. attr {
  4502. key: "_class"
  4503. value {
  4504. list {
  4505. s: "loc:@layer1/biases"
  4506. }
  4507. }
  4508. }
  4509. attr {
  4510. key: "_output_shapes"
  4511. value {
  4512. list {
  4513. shape {
  4514. dim {
  4515. size: 500
  4516. }
  4517. }
  4518. shape {
  4519. dim {
  4520. size: 500
  4521. }
  4522. }
  4523. }
  4524. }
  4525. }
  4526. }
  4527. node {
  4528. name: "layer1/biases/ExponentialMovingAverage/cond_1/read_layer1/biases/ExponentialMovingAverage_0"
  4529. op: "Identity"
  4530. input: "layer1/biases/ExponentialMovingAverage/cond_1/read/Switch_layer1/biases/ExponentialMovingAverage_0:1"
  4531. attr {
  4532. key: "T"
  4533. value {
  4534. type: DT_FLOAT
  4535. }
  4536. }
  4537. attr {
  4538. key: "_class"
  4539. value {
  4540. list {
  4541. s: "loc:@layer1/biases"
  4542. }
  4543. }
  4544. }
  4545. attr {
  4546. key: "_output_shapes"
  4547. value {
  4548. list {
  4549. shape {
  4550. dim {
  4551. size: 500
  4552. }
  4553. }
  4554. }
  4555. }
  4556. }
  4557. }
  4558. node {
  4559. name: "layer1/biases/ExponentialMovingAverage/cond_1/Merge_layer1/biases/ExponentialMovingAverage_0"
  4560. op: "Merge"
  4561. input: "cond_1/Switch_1"
  4562. input: "layer1/biases/ExponentialMovingAverage/cond_1/read_layer1/biases/ExponentialMovingAverage_0"
  4563. attr {
  4564. key: "N"
  4565. value {
  4566. i: 2
  4567. }
  4568. }
  4569. attr {
  4570. key: "T"
  4571. value {
  4572. type: DT_FLOAT
  4573. }
  4574. }
  4575. attr {
  4576. key: "_class"
  4577. value {
  4578. list {
  4579. s: "loc:@layer1/biases"
  4580. }
  4581. }
  4582. }
  4583. attr {
  4584. key: "_output_shapes"
  4585. value {
  4586. list {
  4587. shape {
  4588. dim {
  4589. size: 500
  4590. }
  4591. }
  4592. shape {
  4593. }
  4594. }
  4595. }
  4596. }
  4597. }
  4598. node {
  4599. name: "layer1/biases/ExponentialMovingAverage/Assign"
  4600. op: "Assign"
  4601. input: "layer1/biases/ExponentialMovingAverage"
  4602. input: "layer1/biases/ExponentialMovingAverage/cond_1/Merge_layer1/biases/ExponentialMovingAverage_0"
  4603. attr {
  4604. key: "T"
  4605. value {
  4606. type: DT_FLOAT
  4607. }
  4608. }
  4609. attr {
  4610. key: "_class"
  4611. value {
  4612. list {
  4613. s: "loc:@layer1/biases"
  4614. }
  4615. }
  4616. }
  4617. attr {
  4618. key: "_output_shapes"
  4619. value {
  4620. list {
  4621. shape {
  4622. dim {
  4623. size: 500
  4624. }
  4625. }
  4626. }
  4627. }
  4628. }
  4629. attr {
  4630. key: "use_locking"
  4631. value {
  4632. b: true
  4633. }
  4634. }
  4635. attr {
  4636. key: "validate_shape"
  4637. value {
  4638. b: true
  4639. }
  4640. }
  4641. }
  4642. node {
  4643. name: "layer1/biases/ExponentialMovingAverage/read"
  4644. op: "Identity"
  4645. input: "layer1/biases/ExponentialMovingAverage"
  4646. attr {
  4647. key: "T"
  4648. value {
  4649. type: DT_FLOAT
  4650. }
  4651. }
  4652. attr {
  4653. key: "_class"
  4654. value {
  4655. list {
  4656. s: "loc:@layer1/biases"
  4657. }
  4658. }
  4659. }
  4660. attr {
  4661. key: "_output_shapes"
  4662. value {
  4663. list {
  4664. shape {
  4665. dim {
  4666. size: 500
  4667. }
  4668. }
  4669. }
  4670. }
  4671. }
  4672. }
  4673. node {
  4674. name: "IsVariableInitialized_2"
  4675. op: "IsVariableInitialized"
  4676. input: "layer2/weights"
  4677. attr {
  4678. key: "_class"
  4679. value {
  4680. list {
  4681. s: "loc:@layer2/weights"
  4682. }
  4683. }
  4684. }
  4685. attr {
  4686. key: "_output_shapes"
  4687. value {
  4688. list {
  4689. shape {
  4690. }
  4691. }
  4692. }
  4693. }
  4694. attr {
  4695. key: "dtype"
  4696. value {
  4697. type: DT_FLOAT
  4698. }
  4699. }
  4700. }
  4701. node {
  4702. name: "cond_2/Switch"
  4703. op: "Switch"
  4704. input: "IsVariableInitialized_2"
  4705. input: "IsVariableInitialized_2"
  4706. attr {
  4707. key: "T"
  4708. value {
  4709. type: DT_BOOL
  4710. }
  4711. }
  4712. attr {
  4713. key: "_output_shapes"
  4714. value {
  4715. list {
  4716. shape {
  4717. }
  4718. shape {
  4719. }
  4720. }
  4721. }
  4722. }
  4723. }
  4724. node {
  4725. name: "cond_2/switch_t"
  4726. op: "Identity"
  4727. input: "cond_2/Switch:1"
  4728. attr {
  4729. key: "T"
  4730. value {
  4731. type: DT_BOOL
  4732. }
  4733. }
  4734. attr {
  4735. key: "_output_shapes"
  4736. value {
  4737. list {
  4738. shape {
  4739. }
  4740. }
  4741. }
  4742. }
  4743. }
  4744. node {
  4745. name: "cond_2/switch_f"
  4746. op: "Identity"
  4747. input: "cond_2/Switch"
  4748. attr {
  4749. key: "T"
  4750. value {
  4751. type: DT_BOOL
  4752. }
  4753. }
  4754. attr {
  4755. key: "_output_shapes"
  4756. value {
  4757. list {
  4758. shape {
  4759. }
  4760. }
  4761. }
  4762. }
  4763. }
  4764. node {
  4765. name: "cond_2/pred_id"
  4766. op: "Identity"
  4767. input: "IsVariableInitialized_2"
  4768. attr {
  4769. key: "T"
  4770. value {
  4771. type: DT_BOOL
  4772. }
  4773. }
  4774. attr {
  4775. key: "_output_shapes"
  4776. value {
  4777. list {
  4778. shape {
  4779. }
  4780. }
  4781. }
  4782. }
  4783. }
  4784. node {
  4785. name: "cond_2/read"
  4786. op: "Identity"
  4787. input: "cond_2/read/Switch:1"
  4788. attr {
  4789. key: "T"
  4790. value {
  4791. type: DT_FLOAT
  4792. }
  4793. }
  4794. attr {
  4795. key: "_output_shapes"
  4796. value {
  4797. list {
  4798. shape {
  4799. dim {
  4800. size: 500
  4801. }
  4802. dim {
  4803. size: 10
  4804. }
  4805. }
  4806. }
  4807. }
  4808. }
  4809. }
  4810. node {
  4811. name: "cond_2/read/Switch"
  4812. op: "RefSwitch"
  4813. input: "layer2/weights"
  4814. input: "cond_2/pred_id"
  4815. attr {
  4816. key: "T"
  4817. value {
  4818. type: DT_FLOAT
  4819. }
  4820. }
  4821. attr {
  4822. key: "_class"
  4823. value {
  4824. list {
  4825. s: "loc:@layer2/weights"
  4826. }
  4827. }
  4828. }
  4829. attr {
  4830. key: "_output_shapes"
  4831. value {
  4832. list {
  4833. shape {
  4834. dim {
  4835. size: 500
  4836. }
  4837. dim {
  4838. size: 10
  4839. }
  4840. }
  4841. shape {
  4842. dim {
  4843. size: 500
  4844. }
  4845. dim {
  4846. size: 10
  4847. }
  4848. }
  4849. }
  4850. }
  4851. }
  4852. }
  4853. node {
  4854. name: "cond_2/Switch_1"
  4855. op: "Switch"
  4856. input: "layer2/weights/Initializer/truncated_normal"
  4857. input: "cond_2/pred_id"
  4858. attr {
  4859. key: "T"
  4860. value {
  4861. type: DT_FLOAT
  4862. }
  4863. }
  4864. attr {
  4865. key: "_class"
  4866. value {
  4867. list {
  4868. s: "loc:@layer2/weights"
  4869. }
  4870. }
  4871. }
  4872. attr {
  4873. key: "_output_shapes"
  4874. value {
  4875. list {
  4876. shape {
  4877. dim {
  4878. size: 500
  4879. }
  4880. dim {
  4881. size: 10
  4882. }
  4883. }
  4884. shape {
  4885. dim {
  4886. size: 500
  4887. }
  4888. dim {
  4889. size: 10
  4890. }
  4891. }
  4892. }
  4893. }
  4894. }
  4895. }
  4896. node {
  4897. name: "cond_2/Merge"
  4898. op: "Merge"
  4899. input: "cond_2/Switch_1"
  4900. input: "cond_2/read"
  4901. attr {
  4902. key: "N"
  4903. value {
  4904. i: 2
  4905. }
  4906. }
  4907. attr {
  4908. key: "T"
  4909. value {
  4910. type: DT_FLOAT
  4911. }
  4912. }
  4913. attr {
  4914. key: "_output_shapes"
  4915. value {
  4916. list {
  4917. shape {
  4918. dim {
  4919. size: 500
  4920. }
  4921. dim {
  4922. size: 10
  4923. }
  4924. }
  4925. shape {
  4926. }
  4927. }
  4928. }
  4929. }
  4930. }
  4931. node {
  4932. name: "layer2/weights/ExponentialMovingAverage"
  4933. op: "VariableV2"
  4934. attr {
  4935. key: "_class"
  4936. value {
  4937. list {
  4938. s: "loc:@layer2/weights"
  4939. }
  4940. }
  4941. }
  4942. attr {
  4943. key: "_output_shapes"
  4944. value {
  4945. list {
  4946. shape {
  4947. dim {
  4948. size: 500
  4949. }
  4950. dim {
  4951. size: 10
  4952. }
  4953. }
  4954. }
  4955. }
  4956. }
  4957. attr {
  4958. key: "container"
  4959. value {
  4960. s: ""
  4961. }
  4962. }
  4963. attr {
  4964. key: "dtype"
  4965. value {
  4966. type: DT_FLOAT
  4967. }
  4968. }
  4969. attr {
  4970. key: "shape"
  4971. value {
  4972. shape {
  4973. dim {
  4974. size: 500
  4975. }
  4976. dim {
  4977. size: 10
  4978. }
  4979. }
  4980. }
  4981. }
  4982. attr {
  4983. key: "shared_name"
  4984. value {
  4985. s: ""
  4986. }
  4987. }
  4988. }
  4989. node {
  4990. name: "layer2/weights/ExponentialMovingAverage/IsVariableInitialized"
  4991. op: "IsVariableInitialized"
  4992. input: "layer2/weights"
  4993. attr {
  4994. key: "_class"
  4995. value {
  4996. list {
  4997. s: "loc:@layer2/weights"
  4998. }
  4999. }
  5000. }
  5001. attr {
  5002. key: "_output_shapes"
  5003. value {
  5004. list {
  5005. shape {
  5006. }
  5007. }
  5008. }
  5009. }
  5010. attr {
  5011. key: "dtype"
  5012. value {
  5013. type: DT_FLOAT
  5014. }
  5015. }
  5016. }
  5017. node {
  5018. name: "layer2/weights/ExponentialMovingAverage/cond/Switch"
  5019. op: "Switch"
  5020. input: "layer2/weights/ExponentialMovingAverage/IsVariableInitialized"
  5021. input: "layer2/weights/ExponentialMovingAverage/IsVariableInitialized"
  5022. attr {
  5023. key: "T"
  5024. value {
  5025. type: DT_BOOL
  5026. }
  5027. }
  5028. attr {
  5029. key: "_class"
  5030. value {
  5031. list {
  5032. s: "loc:@layer2/weights"
  5033. }
  5034. }
  5035. }
  5036. attr {
  5037. key: "_output_shapes"
  5038. value {
  5039. list {
  5040. shape {
  5041. }
  5042. shape {
  5043. }
  5044. }
  5045. }
  5046. }
  5047. }
  5048. node {
  5049. name: "layer2/weights/ExponentialMovingAverage/cond/switch_t"
  5050. op: "Identity"
  5051. input: "layer2/weights/ExponentialMovingAverage/cond/Switch:1"
  5052. attr {
  5053. key: "T"
  5054. value {
  5055. type: DT_BOOL
  5056. }
  5057. }
  5058. attr {
  5059. key: "_class"
  5060. value {
  5061. list {
  5062. s: "loc:@layer2/weights"
  5063. }
  5064. }
  5065. }
  5066. attr {
  5067. key: "_output_shapes"
  5068. value {
  5069. list {
  5070. shape {
  5071. }
  5072. }
  5073. }
  5074. }
  5075. }
  5076. node {
  5077. name: "layer2/weights/ExponentialMovingAverage/cond/switch_f"
  5078. op: "Identity"
  5079. input: "layer2/weights/ExponentialMovingAverage/cond/Switch"
  5080. attr {
  5081. key: "T"
  5082. value {
  5083. type: DT_BOOL
  5084. }
  5085. }
  5086. attr {
  5087. key: "_class"
  5088. value {
  5089. list {
  5090. s: "loc:@layer2/weights"
  5091. }
  5092. }
  5093. }
  5094. attr {
  5095. key: "_output_shapes"
  5096. value {
  5097. list {
  5098. shape {
  5099. }
  5100. }
  5101. }
  5102. }
  5103. }
  5104. node {
  5105. name: "layer2/weights/ExponentialMovingAverage/cond/pred_id"
  5106. op: "Identity"
  5107. input: "layer2/weights/ExponentialMovingAverage/IsVariableInitialized"
  5108. attr {
  5109. key: "T"
  5110. value {
  5111. type: DT_BOOL
  5112. }
  5113. }
  5114. attr {
  5115. key: "_class"
  5116. value {
  5117. list {
  5118. s: "loc:@layer2/weights"
  5119. }
  5120. }
  5121. }
  5122. attr {
  5123. key: "_output_shapes"
  5124. value {
  5125. list {
  5126. shape {
  5127. }
  5128. }
  5129. }
  5130. }
  5131. }
  5132. node {
  5133. name: "layer2/weights/ExponentialMovingAverage/cond/read"
  5134. op: "Identity"
  5135. input: "layer2/weights/ExponentialMovingAverage/cond/read/Switch:1"
  5136. attr {
  5137. key: "T"
  5138. value {
  5139. type: DT_FLOAT
  5140. }
  5141. }
  5142. attr {
  5143. key: "_class"
  5144. value {
  5145. list {
  5146. s: "loc:@layer2/weights"
  5147. }
  5148. }
  5149. }
  5150. attr {
  5151. key: "_output_shapes"
  5152. value {
  5153. list {
  5154. shape {
  5155. dim {
  5156. size: 500
  5157. }
  5158. dim {
  5159. size: 10
  5160. }
  5161. }
  5162. }
  5163. }
  5164. }
  5165. }
  5166. node {
  5167. name: "layer2/weights/ExponentialMovingAverage/cond/read/Switch"
  5168. op: "RefSwitch"
  5169. input: "layer2/weights"
  5170. input: "layer2/weights/ExponentialMovingAverage/cond/pred_id"
  5171. attr {
  5172. key: "T"
  5173. value {
  5174. type: DT_FLOAT
  5175. }
  5176. }
  5177. attr {
  5178. key: "_class"
  5179. value {
  5180. list {
  5181. s: "loc:@layer2/weights"
  5182. }
  5183. }
  5184. }
  5185. attr {
  5186. key: "_output_shapes"
  5187. value {
  5188. list {
  5189. shape {
  5190. dim {
  5191. size: 500
  5192. }
  5193. dim {
  5194. size: 10
  5195. }
  5196. }
  5197. shape {
  5198. dim {
  5199. size: 500
  5200. }
  5201. dim {
  5202. size: 10
  5203. }
  5204. }
  5205. }
  5206. }
  5207. }
  5208. }
  5209. node {
  5210. name: "layer2/weights/ExponentialMovingAverage/cond/Switch_1"
  5211. op: "Switch"
  5212. input: "layer2/weights/Initializer/truncated_normal"
  5213. input: "layer2/weights/ExponentialMovingAverage/cond/pred_id"
  5214. attr {
  5215. key: "T"
  5216. value {
  5217. type: DT_FLOAT
  5218. }
  5219. }
  5220. attr {
  5221. key: "_class"
  5222. value {
  5223. list {
  5224. s: "loc:@layer2/weights"
  5225. }
  5226. }
  5227. }
  5228. attr {
  5229. key: "_output_shapes"
  5230. value {
  5231. list {
  5232. shape {
  5233. dim {
  5234. size: 500
  5235. }
  5236. dim {
  5237. size: 10
  5238. }
  5239. }
  5240. shape {
  5241. dim {
  5242. size: 500
  5243. }
  5244. dim {
  5245. size: 10
  5246. }
  5247. }
  5248. }
  5249. }
  5250. }
  5251. }
  5252. node {
  5253. name: "layer2/weights/ExponentialMovingAverage/cond/Merge"
  5254. op: "Merge"
  5255. input: "layer2/weights/ExponentialMovingAverage/cond/Switch_1"
  5256. input: "layer2/weights/ExponentialMovingAverage/cond/read"
  5257. attr {
  5258. key: "N"
  5259. value {
  5260. i: 2
  5261. }
  5262. }
  5263. attr {
  5264. key: "T"
  5265. value {
  5266. type: DT_FLOAT
  5267. }
  5268. }
  5269. attr {
  5270. key: "_class"
  5271. value {
  5272. list {
  5273. s: "loc:@layer2/weights"
  5274. }
  5275. }
  5276. }
  5277. attr {
  5278. key: "_output_shapes"
  5279. value {
  5280. list {
  5281. shape {
  5282. dim {
  5283. size: 500
  5284. }
  5285. dim {
  5286. size: 10
  5287. }
  5288. }
  5289. shape {
  5290. }
  5291. }
  5292. }
  5293. }
  5294. }
  5295. node {
  5296. name: "layer2/weights/ExponentialMovingAverage/cond_2/read/Switch_layer2/weights/ExponentialMovingAverage_0"
  5297. op: "Switch"
  5298. input: "layer2/weights/ExponentialMovingAverage/cond/Merge"
  5299. input: "cond_2/pred_id"
  5300. attr {
  5301. key: "T"
  5302. value {
  5303. type: DT_FLOAT
  5304. }
  5305. }
  5306. attr {
  5307. key: "_class"
  5308. value {
  5309. list {
  5310. s: "loc:@layer2/weights"
  5311. }
  5312. }
  5313. }
  5314. attr {
  5315. key: "_output_shapes"
  5316. value {
  5317. list {
  5318. shape {
  5319. dim {
  5320. size: 500
  5321. }
  5322. dim {
  5323. size: 10
  5324. }
  5325. }
  5326. shape {
  5327. dim {
  5328. size: 500
  5329. }
  5330. dim {
  5331. size: 10
  5332. }
  5333. }
  5334. }
  5335. }
  5336. }
  5337. }
  5338. node {
  5339. name: "layer2/weights/ExponentialMovingAverage/cond_2/read_layer2/weights/ExponentialMovingAverage_0"
  5340. op: "Identity"
  5341. input: "layer2/weights/ExponentialMovingAverage/cond_2/read/Switch_layer2/weights/ExponentialMovingAverage_0:1"
  5342. attr {
  5343. key: "T"
  5344. value {
  5345. type: DT_FLOAT
  5346. }
  5347. }
  5348. attr {
  5349. key: "_class"
  5350. value {
  5351. list {
  5352. s: "loc:@layer2/weights"
  5353. }
  5354. }
  5355. }
  5356. attr {
  5357. key: "_output_shapes"
  5358. value {
  5359. list {
  5360. shape {
  5361. dim {
  5362. size: 500
  5363. }
  5364. dim {
  5365. size: 10
  5366. }
  5367. }
  5368. }
  5369. }
  5370. }
  5371. }
  5372. node {
  5373. name: "layer2/weights/ExponentialMovingAverage/cond_2/Merge_layer2/weights/ExponentialMovingAverage_0"
  5374. op: "Merge"
  5375. input: "cond_2/Switch_1"
  5376. input: "layer2/weights/ExponentialMovingAverage/cond_2/read_layer2/weights/ExponentialMovingAverage_0"
  5377. attr {
  5378. key: "N"
  5379. value {
  5380. i: 2
  5381. }
  5382. }
  5383. attr {
  5384. key: "T"
  5385. value {
  5386. type: DT_FLOAT
  5387. }
  5388. }
  5389. attr {
  5390. key: "_class"
  5391. value {
  5392. list {
  5393. s: "loc:@layer2/weights"
  5394. }
  5395. }
  5396. }
  5397. attr {
  5398. key: "_output_shapes"
  5399. value {
  5400. list {
  5401. shape {
  5402. dim {
  5403. size: 500
  5404. }
  5405. dim {
  5406. size: 10
  5407. }
  5408. }
  5409. shape {
  5410. }
  5411. }
  5412. }
  5413. }
  5414. }
  5415. node {
  5416. name: "layer2/weights/ExponentialMovingAverage/Assign"
  5417. op: "Assign"
  5418. input: "layer2/weights/ExponentialMovingAverage"
  5419. input: "layer2/weights/ExponentialMovingAverage/cond_2/Merge_layer2/weights/ExponentialMovingAverage_0"
  5420. attr {
  5421. key: "T"
  5422. value {
  5423. type: DT_FLOAT
  5424. }
  5425. }
  5426. attr {
  5427. key: "_class"
  5428. value {
  5429. list {
  5430. s: "loc:@layer2/weights"
  5431. }
  5432. }
  5433. }
  5434. attr {
  5435. key: "_output_shapes"
  5436. value {
  5437. list {
  5438. shape {
  5439. dim {
  5440. size: 500
  5441. }
  5442. dim {
  5443. size: 10
  5444. }
  5445. }
  5446. }
  5447. }
  5448. }
  5449. attr {
  5450. key: "use_locking"
  5451. value {
  5452. b: true
  5453. }
  5454. }
  5455. attr {
  5456. key: "validate_shape"
  5457. value {
  5458. b: true
  5459. }
  5460. }
  5461. }
  5462. node {
  5463. name: "layer2/weights/ExponentialMovingAverage/read"
  5464. op: "Identity"
  5465. input: "layer2/weights/ExponentialMovingAverage"
  5466. attr {
  5467. key: "T"
  5468. value {
  5469. type: DT_FLOAT
  5470. }
  5471. }
  5472. attr {
  5473. key: "_class"
  5474. value {
  5475. list {
  5476. s: "loc:@layer2/weights"
  5477. }
  5478. }
  5479. }
  5480. attr {
  5481. key: "_output_shapes"
  5482. value {
  5483. list {
  5484. shape {
  5485. dim {
  5486. size: 500
  5487. }
  5488. dim {
  5489. size: 10
  5490. }
  5491. }
  5492. }
  5493. }
  5494. }
  5495. }
  5496. node {
  5497. name: "IsVariableInitialized_3"
  5498. op: "IsVariableInitialized"
  5499. input: "layer2/biases"
  5500. attr {
  5501. key: "_class"
  5502. value {
  5503. list {
  5504. s: "loc:@layer2/biases"
  5505. }
  5506. }
  5507. }
  5508. attr {
  5509. key: "_output_shapes"
  5510. value {
  5511. list {
  5512. shape {
  5513. }
  5514. }
  5515. }
  5516. }
  5517. attr {
  5518. key: "dtype"
  5519. value {
  5520. type: DT_FLOAT
  5521. }
  5522. }
  5523. }
  5524. node {
  5525. name: "cond_3/Switch"
  5526. op: "Switch"
  5527. input: "IsVariableInitialized_3"
  5528. input: "IsVariableInitialized_3"
  5529. attr {
  5530. key: "T"
  5531. value {
  5532. type: DT_BOOL
  5533. }
  5534. }
  5535. attr {
  5536. key: "_output_shapes"
  5537. value {
  5538. list {
  5539. shape {
  5540. }
  5541. shape {
  5542. }
  5543. }
  5544. }
  5545. }
  5546. }
  5547. node {
  5548. name: "cond_3/switch_t"
  5549. op: "Identity"
  5550. input: "cond_3/Switch:1"
  5551. attr {
  5552. key: "T"
  5553. value {
  5554. type: DT_BOOL
  5555. }
  5556. }
  5557. attr {
  5558. key: "_output_shapes"
  5559. value {
  5560. list {
  5561. shape {
  5562. }
  5563. }
  5564. }
  5565. }
  5566. }
  5567. node {
  5568. name: "cond_3/switch_f"
  5569. op: "Identity"
  5570. input: "cond_3/Switch"
  5571. attr {
  5572. key: "T"
  5573. value {
  5574. type: DT_BOOL
  5575. }
  5576. }
  5577. attr {
  5578. key: "_output_shapes"
  5579. value {
  5580. list {
  5581. shape {
  5582. }
  5583. }
  5584. }
  5585. }
  5586. }
  5587. node {
  5588. name: "cond_3/pred_id"
  5589. op: "Identity"
  5590. input: "IsVariableInitialized_3"
  5591. attr {
  5592. key: "T"
  5593. value {
  5594. type: DT_BOOL
  5595. }
  5596. }
  5597. attr {
  5598. key: "_output_shapes"
  5599. value {
  5600. list {
  5601. shape {
  5602. }
  5603. }
  5604. }
  5605. }
  5606. }
  5607. node {
  5608. name: "cond_3/read"
  5609. op: "Identity"
  5610. input: "cond_3/read/Switch:1"
  5611. attr {
  5612. key: "T"
  5613. value {
  5614. type: DT_FLOAT
  5615. }
  5616. }
  5617. attr {
  5618. key: "_output_shapes"
  5619. value {
  5620. list {
  5621. shape {
  5622. dim {
  5623. size: 10
  5624. }
  5625. }
  5626. }
  5627. }
  5628. }
  5629. }
  5630. node {
  5631. name: "cond_3/read/Switch"
  5632. op: "RefSwitch"
  5633. input: "layer2/biases"
  5634. input: "cond_3/pred_id"
  5635. attr {
  5636. key: "T"
  5637. value {
  5638. type: DT_FLOAT
  5639. }
  5640. }
  5641. attr {
  5642. key: "_class"
  5643. value {
  5644. list {
  5645. s: "loc:@layer2/biases"
  5646. }
  5647. }
  5648. }
  5649. attr {
  5650. key: "_output_shapes"
  5651. value {
  5652. list {
  5653. shape {
  5654. dim {
  5655. size: 10
  5656. }
  5657. }
  5658. shape {
  5659. dim {
  5660. size: 10
  5661. }
  5662. }
  5663. }
  5664. }
  5665. }
  5666. }
  5667. node {
  5668. name: "cond_3/Switch_1"
  5669. op: "Switch"
  5670. input: "layer2/biases/Initializer/Const"
  5671. input: "cond_3/pred_id"
  5672. attr {
  5673. key: "T"
  5674. value {
  5675. type: DT_FLOAT
  5676. }
  5677. }
  5678. attr {
  5679. key: "_class"
  5680. value {
  5681. list {
  5682. s: "loc:@layer2/biases"
  5683. }
  5684. }
  5685. }
  5686. attr {
  5687. key: "_output_shapes"
  5688. value {
  5689. list {
  5690. shape {
  5691. dim {
  5692. size: 10
  5693. }
  5694. }
  5695. shape {
  5696. dim {
  5697. size: 10
  5698. }
  5699. }
  5700. }
  5701. }
  5702. }
  5703. }
  5704. node {
  5705. name: "cond_3/Merge"
  5706. op: "Merge"
  5707. input: "cond_3/Switch_1"
  5708. input: "cond_3/read"
  5709. attr {
  5710. key: "N"
  5711. value {
  5712. i: 2
  5713. }
  5714. }
  5715. attr {
  5716. key: "T"
  5717. value {
  5718. type: DT_FLOAT
  5719. }
  5720. }
  5721. attr {
  5722. key: "_output_shapes"
  5723. value {
  5724. list {
  5725. shape {
  5726. dim {
  5727. size: 10
  5728. }
  5729. }
  5730. shape {
  5731. }
  5732. }
  5733. }
  5734. }
  5735. }
  5736. node {
  5737. name: "layer2/biases/ExponentialMovingAverage"
  5738. op: "VariableV2"
  5739. attr {
  5740. key: "_class"
  5741. value {
  5742. list {
  5743. s: "loc:@layer2/biases"
  5744. }
  5745. }
  5746. }
  5747. attr {
  5748. key: "_output_shapes"
  5749. value {
  5750. list {
  5751. shape {
  5752. dim {
  5753. size: 10
  5754. }
  5755. }
  5756. }
  5757. }
  5758. }
  5759. attr {
  5760. key: "container"
  5761. value {
  5762. s: ""
  5763. }
  5764. }
  5765. attr {
  5766. key: "dtype"
  5767. value {
  5768. type: DT_FLOAT
  5769. }
  5770. }
  5771. attr {
  5772. key: "shape"
  5773. value {
  5774. shape {
  5775. dim {
  5776. size: 10
  5777. }
  5778. }
  5779. }
  5780. }
  5781. attr {
  5782. key: "shared_name"
  5783. value {
  5784. s: ""
  5785. }
  5786. }
  5787. }
  5788. node {
  5789. name: "layer2/biases/ExponentialMovingAverage/IsVariableInitialized"
  5790. op: "IsVariableInitialized"
  5791. input: "layer2/biases"
  5792. attr {
  5793. key: "_class"
  5794. value {
  5795. list {
  5796. s: "loc:@layer2/biases"
  5797. }
  5798. }
  5799. }
  5800. attr {
  5801. key: "_output_shapes"
  5802. value {
  5803. list {
  5804. shape {
  5805. }
  5806. }
  5807. }
  5808. }
  5809. attr {
  5810. key: "dtype"
  5811. value {
  5812. type: DT_FLOAT
  5813. }
  5814. }
  5815. }
  5816. node {
  5817. name: "layer2/biases/ExponentialMovingAverage/cond/Switch"
  5818. op: "Switch"
  5819. input: "layer2/biases/ExponentialMovingAverage/IsVariableInitialized"
  5820. input: "layer2/biases/ExponentialMovingAverage/IsVariableInitialized"
  5821. attr {
  5822. key: "T"
  5823. value {
  5824. type: DT_BOOL
  5825. }
  5826. }
  5827. attr {
  5828. key: "_class"
  5829. value {
  5830. list {
  5831. s: "loc:@layer2/biases"
  5832. }
  5833. }
  5834. }
  5835. attr {
  5836. key: "_output_shapes"
  5837. value {
  5838. list {
  5839. shape {
  5840. }
  5841. shape {
  5842. }
  5843. }
  5844. }
  5845. }
  5846. }
  5847. node {
  5848. name: "layer2/biases/ExponentialMovingAverage/cond/switch_t"
  5849. op: "Identity"
  5850. input: "layer2/biases/ExponentialMovingAverage/cond/Switch:1"
  5851. attr {
  5852. key: "T"
  5853. value {
  5854. type: DT_BOOL
  5855. }
  5856. }
  5857. attr {
  5858. key: "_class"
  5859. value {
  5860. list {
  5861. s: "loc:@layer2/biases"
  5862. }
  5863. }
  5864. }
  5865. attr {
  5866. key: "_output_shapes"
  5867. value {
  5868. list {
  5869. shape {
  5870. }
  5871. }
  5872. }
  5873. }
  5874. }
  5875. node {
  5876. name: "layer2/biases/ExponentialMovingAverage/cond/switch_f"
  5877. op: "Identity"
  5878. input: "layer2/biases/ExponentialMovingAverage/cond/Switch"
  5879. attr {
  5880. key: "T"
  5881. value {
  5882. type: DT_BOOL
  5883. }
  5884. }
  5885. attr {
  5886. key: "_class"
  5887. value {
  5888. list {
  5889. s: "loc:@layer2/biases"
  5890. }
  5891. }
  5892. }
  5893. attr {
  5894. key: "_output_shapes"
  5895. value {
  5896. list {
  5897. shape {
  5898. }
  5899. }
  5900. }
  5901. }
  5902. }
  5903. node {
  5904. name: "layer2/biases/ExponentialMovingAverage/cond/pred_id"
  5905. op: "Identity"
  5906. input: "layer2/biases/ExponentialMovingAverage/IsVariableInitialized"
  5907. attr {
  5908. key: "T"
  5909. value {
  5910. type: DT_BOOL
  5911. }
  5912. }
  5913. attr {
  5914. key: "_class"
  5915. value {
  5916. list {
  5917. s: "loc:@layer2/biases"
  5918. }
  5919. }
  5920. }
  5921. attr {
  5922. key: "_output_shapes"
  5923. value {
  5924. list {
  5925. shape {
  5926. }
  5927. }
  5928. }
  5929. }
  5930. }
  5931. node {
  5932. name: "layer2/biases/ExponentialMovingAverage/cond/read"
  5933. op: "Identity"
  5934. input: "layer2/biases/ExponentialMovingAverage/cond/read/Switch:1"
  5935. attr {
  5936. key: "T"
  5937. value {
  5938. type: DT_FLOAT
  5939. }
  5940. }
  5941. attr {
  5942. key: "_class"
  5943. value {
  5944. list {
  5945. s: "loc:@layer2/biases"
  5946. }
  5947. }
  5948. }
  5949. attr {
  5950. key: "_output_shapes"
  5951. value {
  5952. list {
  5953. shape {
  5954. dim {
  5955. size: 10
  5956. }
  5957. }
  5958. }
  5959. }
  5960. }
  5961. }
  5962. node {
  5963. name: "layer2/biases/ExponentialMovingAverage/cond/read/Switch"
  5964. op: "RefSwitch"
  5965. input: "layer2/biases"
  5966. input: "layer2/biases/ExponentialMovingAverage/cond/pred_id"
  5967. attr {
  5968. key: "T"
  5969. value {
  5970. type: DT_FLOAT
  5971. }
  5972. }
  5973. attr {
  5974. key: "_class"
  5975. value {
  5976. list {
  5977. s: "loc:@layer2/biases"
  5978. }
  5979. }
  5980. }
  5981. attr {
  5982. key: "_output_shapes"
  5983. value {
  5984. list {
  5985. shape {
  5986. dim {
  5987. size: 10
  5988. }
  5989. }
  5990. shape {
  5991. dim {
  5992. size: 10
  5993. }
  5994. }
  5995. }
  5996. }
  5997. }
  5998. }
  5999. node {
  6000. name: "layer2/biases/ExponentialMovingAverage/cond/Switch_1"
  6001. op: "Switch"
  6002. input: "layer2/biases/Initializer/Const"
  6003. input: "layer2/biases/ExponentialMovingAverage/cond/pred_id"
  6004. attr {
  6005. key: "T"
  6006. value {
  6007. type: DT_FLOAT
  6008. }
  6009. }
  6010. attr {
  6011. key: "_class"
  6012. value {
  6013. list {
  6014. s: "loc:@layer2/biases"
  6015. }
  6016. }
  6017. }
  6018. attr {
  6019. key: "_output_shapes"
  6020. value {
  6021. list {
  6022. shape {
  6023. dim {
  6024. size: 10
  6025. }
  6026. }
  6027. shape {
  6028. dim {
  6029. size: 10
  6030. }
  6031. }
  6032. }
  6033. }
  6034. }
  6035. }
  6036. node {
  6037. name: "layer2/biases/ExponentialMovingAverage/cond/Merge"
  6038. op: "Merge"
  6039. input: "layer2/biases/ExponentialMovingAverage/cond/Switch_1"
  6040. input: "layer2/biases/ExponentialMovingAverage/cond/read"
  6041. attr {
  6042. key: "N"
  6043. value {
  6044. i: 2
  6045. }
  6046. }
  6047. attr {
  6048. key: "T"
  6049. value {
  6050. type: DT_FLOAT
  6051. }
  6052. }
  6053. attr {
  6054. key: "_class"
  6055. value {
  6056. list {
  6057. s: "loc:@layer2/biases"
  6058. }
  6059. }
  6060. }
  6061. attr {
  6062. key: "_output_shapes"
  6063. value {
  6064. list {
  6065. shape {
  6066. dim {
  6067. size: 10
  6068. }
  6069. }
  6070. shape {
  6071. }
  6072. }
  6073. }
  6074. }
  6075. }
  6076. node {
  6077. name: "layer2/biases/ExponentialMovingAverage/cond_3/read/Switch_layer2/biases/ExponentialMovingAverage_0"
  6078. op: "Switch"
  6079. input: "layer2/biases/ExponentialMovingAverage/cond/Merge"
  6080. input: "cond_3/pred_id"
  6081. attr {
  6082. key: "T"
  6083. value {
  6084. type: DT_FLOAT
  6085. }
  6086. }
  6087. attr {
  6088. key: "_class"
  6089. value {
  6090. list {
  6091. s: "loc:@layer2/biases"
  6092. }
  6093. }
  6094. }
  6095. attr {
  6096. key: "_output_shapes"
  6097. value {
  6098. list {
  6099. shape {
  6100. dim {
  6101. size: 10
  6102. }
  6103. }
  6104. shape {
  6105. dim {
  6106. size: 10
  6107. }
  6108. }
  6109. }
  6110. }
  6111. }
  6112. }
  6113. node {
  6114. name: "layer2/biases/ExponentialMovingAverage/cond_3/read_layer2/biases/ExponentialMovingAverage_0"
  6115. op: "Identity"
  6116. input: "layer2/biases/ExponentialMovingAverage/cond_3/read/Switch_layer2/biases/ExponentialMovingAverage_0:1"
  6117. attr {
  6118. key: "T"
  6119. value {
  6120. type: DT_FLOAT
  6121. }
  6122. }
  6123. attr {
  6124. key: "_class"
  6125. value {
  6126. list {
  6127. s: "loc:@layer2/biases"
  6128. }
  6129. }
  6130. }
  6131. attr {
  6132. key: "_output_shapes"
  6133. value {
  6134. list {
  6135. shape {
  6136. dim {
  6137. size: 10
  6138. }
  6139. }
  6140. }
  6141. }
  6142. }
  6143. }
  6144. node {
  6145. name: "layer2/biases/ExponentialMovingAverage/cond_3/Merge_layer2/biases/ExponentialMovingAverage_0"
  6146. op: "Merge"
  6147. input: "cond_3/Switch_1"
  6148. input: "layer2/biases/ExponentialMovingAverage/cond_3/read_layer2/biases/ExponentialMovingAverage_0"
  6149. attr {
  6150. key: "N"
  6151. value {
  6152. i: 2
  6153. }
  6154. }
  6155. attr {
  6156. key: "T"
  6157. value {
  6158. type: DT_FLOAT
  6159. }
  6160. }
  6161. attr {
  6162. key: "_class"
  6163. value {
  6164. list {
  6165. s: "loc:@layer2/biases"
  6166. }
  6167. }
  6168. }
  6169. attr {
  6170. key: "_output_shapes"
  6171. value {
  6172. list {
  6173. shape {
  6174. dim {
  6175. size: 10
  6176. }
  6177. }
  6178. shape {
  6179. }
  6180. }
  6181. }
  6182. }
  6183. }
  6184. node {
  6185. name: "layer2/biases/ExponentialMovingAverage/Assign"
  6186. op: "Assign"
  6187. input: "layer2/biases/ExponentialMovingAverage"
  6188. input: "layer2/biases/ExponentialMovingAverage/cond_3/Merge_layer2/biases/ExponentialMovingAverage_0"
  6189. attr {
  6190. key: "T"
  6191. value {
  6192. type: DT_FLOAT
  6193. }
  6194. }
  6195. attr {
  6196. key: "_class"
  6197. value {
  6198. list {
  6199. s: "loc:@layer2/biases"
  6200. }
  6201. }
  6202. }
  6203. attr {
  6204. key: "_output_shapes"
  6205. value {
  6206. list {
  6207. shape {
  6208. dim {
  6209. size: 10
  6210. }
  6211. }
  6212. }
  6213. }
  6214. }
  6215. attr {
  6216. key: "use_locking"
  6217. value {
  6218. b: true
  6219. }
  6220. }
  6221. attr {
  6222. key: "validate_shape"
  6223. value {
  6224. b: true
  6225. }
  6226. }
  6227. }
  6228. node {
  6229. name: "layer2/biases/ExponentialMovingAverage/read"
  6230. op: "Identity"
  6231. input: "layer2/biases/ExponentialMovingAverage"
  6232. attr {
  6233. key: "T"
  6234. value {
  6235. type: DT_FLOAT
  6236. }
  6237. }
  6238. attr {
  6239. key: "_class"
  6240. value {
  6241. list {
  6242. s: "loc:@layer2/biases"
  6243. }
  6244. }
  6245. }
  6246. attr {
  6247. key: "_output_shapes"
  6248. value {
  6249. list {
  6250. shape {
  6251. dim {
  6252. size: 10
  6253. }
  6254. }
  6255. }
  6256. }
  6257. }
  6258. }
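# The nodes below compute the effective decay used by every moving-average
# update in this graph:
#   decay_t = min(0.99, (1 + num_updates) / (10 + num_updates))
# where num_updates is Variable/read cast to float
# (ExponentialMovingAverage/num_updates) and 0.99 is the
# ExponentialMovingAverage/decay constant (stored as the float32 value
# 0.990000009537). This matches the num_updates correction applied by
# tf.train.ExponentialMovingAverage, which warms the decay up toward 0.99 as
# training progresses.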
  6259. node {
  6260. name: "ExponentialMovingAverage/decay"
  6261. op: "Const"
  6262. attr {
  6263. key: "_output_shapes"
  6264. value {
  6265. list {
  6266. shape {
  6267. }
  6268. }
  6269. }
  6270. }
  6271. attr {
  6272. key: "dtype"
  6273. value {
  6274. type: DT_FLOAT
  6275. }
  6276. }
  6277. attr {
  6278. key: "value"
  6279. value {
  6280. tensor {
  6281. dtype: DT_FLOAT
  6282. tensor_shape {
  6283. }
  6284. float_val: 0.990000009537
  6285. }
  6286. }
  6287. }
  6288. }
  6289. node {
  6290. name: "ExponentialMovingAverage/num_updates"
  6291. op: "Cast"
  6292. input: "Variable/read"
  6293. attr {
  6294. key: "DstT"
  6295. value {
  6296. type: DT_FLOAT
  6297. }
  6298. }
  6299. attr {
  6300. key: "SrcT"
  6301. value {
  6302. type: DT_INT32
  6303. }
  6304. }
  6305. attr {
  6306. key: "_output_shapes"
  6307. value {
  6308. list {
  6309. shape {
  6310. }
  6311. }
  6312. }
  6313. }
  6314. }
  6315. node {
  6316. name: "ExponentialMovingAverage/add/x"
  6317. op: "Const"
  6318. attr {
  6319. key: "_output_shapes"
  6320. value {
  6321. list {
  6322. shape {
  6323. }
  6324. }
  6325. }
  6326. }
  6327. attr {
  6328. key: "dtype"
  6329. value {
  6330. type: DT_FLOAT
  6331. }
  6332. }
  6333. attr {
  6334. key: "value"
  6335. value {
  6336. tensor {
  6337. dtype: DT_FLOAT
  6338. tensor_shape {
  6339. }
  6340. float_val: 1.0
  6341. }
  6342. }
  6343. }
  6344. }
  6345. node {
  6346. name: "ExponentialMovingAverage/add"
  6347. op: "Add"
  6348. input: "ExponentialMovingAverage/add/x"
  6349. input: "ExponentialMovingAverage/num_updates"
  6350. attr {
  6351. key: "T"
  6352. value {
  6353. type: DT_FLOAT
  6354. }
  6355. }
  6356. attr {
  6357. key: "_output_shapes"
  6358. value {
  6359. list {
  6360. shape {
  6361. }
  6362. }
  6363. }
  6364. }
  6365. }
  6366. node {
  6367. name: "ExponentialMovingAverage/add_1/x"
  6368. op: "Const"
  6369. attr {
  6370. key: "_output_shapes"
  6371. value {
  6372. list {
  6373. shape {
  6374. }
  6375. }
  6376. }
  6377. }
  6378. attr {
  6379. key: "dtype"
  6380. value {
  6381. type: DT_FLOAT
  6382. }
  6383. }
  6384. attr {
  6385. key: "value"
  6386. value {
  6387. tensor {
  6388. dtype: DT_FLOAT
  6389. tensor_shape {
  6390. }
  6391. float_val: 10.0
  6392. }
  6393. }
  6394. }
  6395. }
  6396. node {
  6397. name: "ExponentialMovingAverage/add_1"
  6398. op: "Add"
  6399. input: "ExponentialMovingAverage/add_1/x"
  6400. input: "ExponentialMovingAverage/num_updates"
  6401. attr {
  6402. key: "T"
  6403. value {
  6404. type: DT_FLOAT
  6405. }
  6406. }
  6407. attr {
  6408. key: "_output_shapes"
  6409. value {
  6410. list {
  6411. shape {
  6412. }
  6413. }
  6414. }
  6415. }
  6416. }
  6417. node {
  6418. name: "ExponentialMovingAverage/truediv"
  6419. op: "RealDiv"
  6420. input: "ExponentialMovingAverage/add"
  6421. input: "ExponentialMovingAverage/add_1"
  6422. attr {
  6423. key: "T"
  6424. value {
  6425. type: DT_FLOAT
  6426. }
  6427. }
  6428. attr {
  6429. key: "_output_shapes"
  6430. value {
  6431. list {
  6432. shape {
  6433. }
  6434. }
  6435. }
  6436. }
  6437. }
  6438. node {
  6439. name: "ExponentialMovingAverage/Minimum"
  6440. op: "Minimum"
  6441. input: "ExponentialMovingAverage/decay"
  6442. input: "ExponentialMovingAverage/truediv"
  6443. attr {
  6444. key: "T"
  6445. value {
  6446. type: DT_FLOAT
  6447. }
  6448. }
  6449. attr {
  6450. key: "_output_shapes"
  6451. value {
  6452. list {
  6453. shape {
  6454. }
  6455. }
  6456. }
  6457. }
  6458. }
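# Each AssignMovingAvg group below applies one moving-average step to a shadow
# variable via Sub/Mul/AssignSub:
#   shadow -= (1 - decay_t) * (shadow - value)
# which is equivalent to  shadow = decay_t * shadow + (1 - decay_t) * value.
# AssignMovingAvg updates layer1/weights/ExponentialMovingAverage; the _1, _2
# and _3 copies update layer1/biases, layer2/weights and layer2/biases. The
# ExponentialMovingAverage NoOp after them groups all four updates into a
# single op via control-dependency inputs.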
  6459. node {
  6460. name: "ExponentialMovingAverage/AssignMovingAvg/sub/x"
  6461. op: "Const"
  6462. attr {
  6463. key: "_class"
  6464. value {
  6465. list {
  6466. s: "loc:@layer1/weights"
  6467. }
  6468. }
  6469. }
  6470. attr {
  6471. key: "_output_shapes"
  6472. value {
  6473. list {
  6474. shape {
  6475. }
  6476. }
  6477. }
  6478. }
  6479. attr {
  6480. key: "dtype"
  6481. value {
  6482. type: DT_FLOAT
  6483. }
  6484. }
  6485. attr {
  6486. key: "value"
  6487. value {
  6488. tensor {
  6489. dtype: DT_FLOAT
  6490. tensor_shape {
  6491. }
  6492. float_val: 1.0
  6493. }
  6494. }
  6495. }
  6496. }
  6497. node {
  6498. name: "ExponentialMovingAverage/AssignMovingAvg/sub"
  6499. op: "Sub"
  6500. input: "ExponentialMovingAverage/AssignMovingAvg/sub/x"
  6501. input: "ExponentialMovingAverage/Minimum"
  6502. attr {
  6503. key: "T"
  6504. value {
  6505. type: DT_FLOAT
  6506. }
  6507. }
  6508. attr {
  6509. key: "_class"
  6510. value {
  6511. list {
  6512. s: "loc:@layer1/weights"
  6513. }
  6514. }
  6515. }
  6516. attr {
  6517. key: "_output_shapes"
  6518. value {
  6519. list {
  6520. shape {
  6521. }
  6522. }
  6523. }
  6524. }
  6525. }
  6526. node {
  6527. name: "ExponentialMovingAverage/AssignMovingAvg/sub_1"
  6528. op: "Sub"
  6529. input: "layer1/weights/ExponentialMovingAverage/read"
  6530. input: "layer1/weights/read"
  6531. attr {
  6532. key: "T"
  6533. value {
  6534. type: DT_FLOAT
  6535. }
  6536. }
  6537. attr {
  6538. key: "_class"
  6539. value {
  6540. list {
  6541. s: "loc:@layer1/weights"
  6542. }
  6543. }
  6544. }
  6545. attr {
  6546. key: "_output_shapes"
  6547. value {
  6548. list {
  6549. shape {
  6550. dim {
  6551. size: 784
  6552. }
  6553. dim {
  6554. size: 500
  6555. }
  6556. }
  6557. }
  6558. }
  6559. }
  6560. }
  6561. node {
  6562. name: "ExponentialMovingAverage/AssignMovingAvg/mul"
  6563. op: "Mul"
  6564. input: "ExponentialMovingAverage/AssignMovingAvg/sub_1"
  6565. input: "ExponentialMovingAverage/AssignMovingAvg/sub"
  6566. attr {
  6567. key: "T"
  6568. value {
  6569. type: DT_FLOAT
  6570. }
  6571. }
  6572. attr {
  6573. key: "_class"
  6574. value {
  6575. list {
  6576. s: "loc:@layer1/weights"
  6577. }
  6578. }
  6579. }
  6580. attr {
  6581. key: "_output_shapes"
  6582. value {
  6583. list {
  6584. shape {
  6585. dim {
  6586. size: 784
  6587. }
  6588. dim {
  6589. size: 500
  6590. }
  6591. }
  6592. }
  6593. }
  6594. }
  6595. }
  6596. node {
  6597. name: "ExponentialMovingAverage/AssignMovingAvg"
  6598. op: "AssignSub"
  6599. input: "layer1/weights/ExponentialMovingAverage"
  6600. input: "ExponentialMovingAverage/AssignMovingAvg/mul"
  6601. attr {
  6602. key: "T"
  6603. value {
  6604. type: DT_FLOAT
  6605. }
  6606. }
  6607. attr {
  6608. key: "_class"
  6609. value {
  6610. list {
  6611. s: "loc:@layer1/weights"
  6612. }
  6613. }
  6614. }
  6615. attr {
  6616. key: "_output_shapes"
  6617. value {
  6618. list {
  6619. shape {
  6620. dim {
  6621. size: 784
  6622. }
  6623. dim {
  6624. size: 500
  6625. }
  6626. }
  6627. }
  6628. }
  6629. }
  6630. attr {
  6631. key: "use_locking"
  6632. value {
  6633. b: false
  6634. }
  6635. }
  6636. }
  6637. node {
  6638. name: "ExponentialMovingAverage/AssignMovingAvg_1/sub/x"
  6639. op: "Const"
  6640. attr {
  6641. key: "_class"
  6642. value {
  6643. list {
  6644. s: "loc:@layer1/biases"
  6645. }
  6646. }
  6647. }
  6648. attr {
  6649. key: "_output_shapes"
  6650. value {
  6651. list {
  6652. shape {
  6653. }
  6654. }
  6655. }
  6656. }
  6657. attr {
  6658. key: "dtype"
  6659. value {
  6660. type: DT_FLOAT
  6661. }
  6662. }
  6663. attr {
  6664. key: "value"
  6665. value {
  6666. tensor {
  6667. dtype: DT_FLOAT
  6668. tensor_shape {
  6669. }
  6670. float_val: 1.0
  6671. }
  6672. }
  6673. }
  6674. }
  6675. node {
  6676. name: "ExponentialMovingAverage/AssignMovingAvg_1/sub"
  6677. op: "Sub"
  6678. input: "ExponentialMovingAverage/AssignMovingAvg_1/sub/x"
  6679. input: "ExponentialMovingAverage/Minimum"
  6680. attr {
  6681. key: "T"
  6682. value {
  6683. type: DT_FLOAT
  6684. }
  6685. }
  6686. attr {
  6687. key: "_class"
  6688. value {
  6689. list {
  6690. s: "loc:@layer1/biases"
  6691. }
  6692. }
  6693. }
  6694. attr {
  6695. key: "_output_shapes"
  6696. value {
  6697. list {
  6698. shape {
  6699. }
  6700. }
  6701. }
  6702. }
  6703. }
  6704. node {
  6705. name: "ExponentialMovingAverage/AssignMovingAvg_1/sub_1"
  6706. op: "Sub"
  6707. input: "layer1/biases/ExponentialMovingAverage/read"
  6708. input: "layer1/biases/read"
  6709. attr {
  6710. key: "T"
  6711. value {
  6712. type: DT_FLOAT
  6713. }
  6714. }
  6715. attr {
  6716. key: "_class"
  6717. value {
  6718. list {
  6719. s: "loc:@layer1/biases"
  6720. }
  6721. }
  6722. }
  6723. attr {
  6724. key: "_output_shapes"
  6725. value {
  6726. list {
  6727. shape {
  6728. dim {
  6729. size: 500
  6730. }
  6731. }
  6732. }
  6733. }
  6734. }
  6735. }
  6736. node {
  6737. name: "ExponentialMovingAverage/AssignMovingAvg_1/mul"
  6738. op: "Mul"
  6739. input: "ExponentialMovingAverage/AssignMovingAvg_1/sub_1"
  6740. input: "ExponentialMovingAverage/AssignMovingAvg_1/sub"
  6741. attr {
  6742. key: "T"
  6743. value {
  6744. type: DT_FLOAT
  6745. }
  6746. }
  6747. attr {
  6748. key: "_class"
  6749. value {
  6750. list {
  6751. s: "loc:@layer1/biases"
  6752. }
  6753. }
  6754. }
  6755. attr {
  6756. key: "_output_shapes"
  6757. value {
  6758. list {
  6759. shape {
  6760. dim {
  6761. size: 500
  6762. }
  6763. }
  6764. }
  6765. }
  6766. }
  6767. }
  6768. node {
  6769. name: "ExponentialMovingAverage/AssignMovingAvg_1"
  6770. op: "AssignSub"
  6771. input: "layer1/biases/ExponentialMovingAverage"
  6772. input: "ExponentialMovingAverage/AssignMovingAvg_1/mul"
  6773. attr {
  6774. key: "T"
  6775. value {
  6776. type: DT_FLOAT
  6777. }
  6778. }
  6779. attr {
  6780. key: "_class"
  6781. value {
  6782. list {
  6783. s: "loc:@layer1/biases"
  6784. }
  6785. }
  6786. }
  6787. attr {
  6788. key: "_output_shapes"
  6789. value {
  6790. list {
  6791. shape {
  6792. dim {
  6793. size: 500
  6794. }
  6795. }
  6796. }
  6797. }
  6798. }
  6799. attr {
  6800. key: "use_locking"
  6801. value {
  6802. b: false
  6803. }
  6804. }
  6805. }
  6806. node {
  6807. name: "ExponentialMovingAverage/AssignMovingAvg_2/sub/x"
  6808. op: "Const"
  6809. attr {
  6810. key: "_class"
  6811. value {
  6812. list {
  6813. s: "loc:@layer2/weights"
  6814. }
  6815. }
  6816. }
  6817. attr {
  6818. key: "_output_shapes"
  6819. value {
  6820. list {
  6821. shape {
  6822. }
  6823. }
  6824. }
  6825. }
  6826. attr {
  6827. key: "dtype"
  6828. value {
  6829. type: DT_FLOAT
  6830. }
  6831. }
  6832. attr {
  6833. key: "value"
  6834. value {
  6835. tensor {
  6836. dtype: DT_FLOAT
  6837. tensor_shape {
  6838. }
  6839. float_val: 1.0
  6840. }
  6841. }
  6842. }
  6843. }
  6844. node {
  6845. name: "ExponentialMovingAverage/AssignMovingAvg_2/sub"
  6846. op: "Sub"
  6847. input: "ExponentialMovingAverage/AssignMovingAvg_2/sub/x"
  6848. input: "ExponentialMovingAverage/Minimum"
  6849. attr {
  6850. key: "T"
  6851. value {
  6852. type: DT_FLOAT
  6853. }
  6854. }
  6855. attr {
  6856. key: "_class"
  6857. value {
  6858. list {
  6859. s: "loc:@layer2/weights"
  6860. }
  6861. }
  6862. }
  6863. attr {
  6864. key: "_output_shapes"
  6865. value {
  6866. list {
  6867. shape {
  6868. }
  6869. }
  6870. }
  6871. }
  6872. }
  6873. node {
  6874. name: "ExponentialMovingAverage/AssignMovingAvg_2/sub_1"
  6875. op: "Sub"
  6876. input: "layer2/weights/ExponentialMovingAverage/read"
  6877. input: "layer2/weights/read"
  6878. attr {
  6879. key: "T"
  6880. value {
  6881. type: DT_FLOAT
  6882. }
  6883. }
  6884. attr {
  6885. key: "_class"
  6886. value {
  6887. list {
  6888. s: "loc:@layer2/weights"
  6889. }
  6890. }
  6891. }
  6892. attr {
  6893. key: "_output_shapes"
  6894. value {
  6895. list {
  6896. shape {
  6897. dim {
  6898. size: 500
  6899. }
  6900. dim {
  6901. size: 10
  6902. }
  6903. }
  6904. }
  6905. }
  6906. }
  6907. }
  6908. node {
  6909. name: "ExponentialMovingAverage/AssignMovingAvg_2/mul"
  6910. op: "Mul"
  6911. input: "ExponentialMovingAverage/AssignMovingAvg_2/sub_1"
  6912. input: "ExponentialMovingAverage/AssignMovingAvg_2/sub"
  6913. attr {
  6914. key: "T"
  6915. value {
  6916. type: DT_FLOAT
  6917. }
  6918. }
  6919. attr {
  6920. key: "_class"
  6921. value {
  6922. list {
  6923. s: "loc:@layer2/weights"
  6924. }
  6925. }
  6926. }
  6927. attr {
  6928. key: "_output_shapes"
  6929. value {
  6930. list {
  6931. shape {
  6932. dim {
  6933. size: 500
  6934. }
  6935. dim {
  6936. size: 10
  6937. }
  6938. }
  6939. }
  6940. }
  6941. }
  6942. }
  6943. node {
  6944. name: "ExponentialMovingAverage/AssignMovingAvg_2"
  6945. op: "AssignSub"
  6946. input: "layer2/weights/ExponentialMovingAverage"
  6947. input: "ExponentialMovingAverage/AssignMovingAvg_2/mul"
  6948. attr {
  6949. key: "T"
  6950. value {
  6951. type: DT_FLOAT
  6952. }
  6953. }
  6954. attr {
  6955. key: "_class"
  6956. value {
  6957. list {
  6958. s: "loc:@layer2/weights"
  6959. }
  6960. }
  6961. }
  6962. attr {
  6963. key: "_output_shapes"
  6964. value {
  6965. list {
  6966. shape {
  6967. dim {
  6968. size: 500
  6969. }
  6970. dim {
  6971. size: 10
  6972. }
  6973. }
  6974. }
  6975. }
  6976. }
  6977. attr {
  6978. key: "use_locking"
  6979. value {
  6980. b: false
  6981. }
  6982. }
  6983. }
  6984. node {
  6985. name: "ExponentialMovingAverage/AssignMovingAvg_3/sub/x"
  6986. op: "Const"
  6987. attr {
  6988. key: "_class"
  6989. value {
  6990. list {
  6991. s: "loc:@layer2/biases"
  6992. }
  6993. }
  6994. }
  6995. attr {
  6996. key: "_output_shapes"
  6997. value {
  6998. list {
  6999. shape {
  7000. }
  7001. }
  7002. }
  7003. }
  7004. attr {
  7005. key: "dtype"
  7006. value {
  7007. type: DT_FLOAT
  7008. }
  7009. }
  7010. attr {
  7011. key: "value"
  7012. value {
  7013. tensor {
  7014. dtype: DT_FLOAT
  7015. tensor_shape {
  7016. }
  7017. float_val: 1.0
  7018. }
  7019. }
  7020. }
  7021. }
  7022. node {
  7023. name: "ExponentialMovingAverage/AssignMovingAvg_3/sub"
  7024. op: "Sub"
  7025. input: "ExponentialMovingAverage/AssignMovingAvg_3/sub/x"
  7026. input: "ExponentialMovingAverage/Minimum"
  7027. attr {
  7028. key: "T"
  7029. value {
  7030. type: DT_FLOAT
  7031. }
  7032. }
  7033. attr {
  7034. key: "_class"
  7035. value {
  7036. list {
  7037. s: "loc:@layer2/biases"
  7038. }
  7039. }
  7040. }
  7041. attr {
  7042. key: "_output_shapes"
  7043. value {
  7044. list {
  7045. shape {
  7046. }
  7047. }
  7048. }
  7049. }
  7050. }
  7051. node {
  7052. name: "ExponentialMovingAverage/AssignMovingAvg_3/sub_1"
  7053. op: "Sub"
  7054. input: "layer2/biases/ExponentialMovingAverage/read"
  7055. input: "layer2/biases/read"
  7056. attr {
  7057. key: "T"
  7058. value {
  7059. type: DT_FLOAT
  7060. }
  7061. }
  7062. attr {
  7063. key: "_class"
  7064. value {
  7065. list {
  7066. s: "loc:@layer2/biases"
  7067. }
  7068. }
  7069. }
  7070. attr {
  7071. key: "_output_shapes"
  7072. value {
  7073. list {
  7074. shape {
  7075. dim {
  7076. size: 10
  7077. }
  7078. }
  7079. }
  7080. }
  7081. }
  7082. }
  7083. node {
  7084. name: "ExponentialMovingAverage/AssignMovingAvg_3/mul"
  7085. op: "Mul"
  7086. input: "ExponentialMovingAverage/AssignMovingAvg_3/sub_1"
  7087. input: "ExponentialMovingAverage/AssignMovingAvg_3/sub"
  7088. attr {
  7089. key: "T"
  7090. value {
  7091. type: DT_FLOAT
  7092. }
  7093. }
  7094. attr {
  7095. key: "_class"
  7096. value {
  7097. list {
  7098. s: "loc:@layer2/biases"
  7099. }
  7100. }
  7101. }
  7102. attr {
  7103. key: "_output_shapes"
  7104. value {
  7105. list {
  7106. shape {
  7107. dim {
  7108. size: 10
  7109. }
  7110. }
  7111. }
  7112. }
  7113. }
  7114. }
  7115. node {
  7116. name: "ExponentialMovingAverage/AssignMovingAvg_3"
  7117. op: "AssignSub"
  7118. input: "layer2/biases/ExponentialMovingAverage"
  7119. input: "ExponentialMovingAverage/AssignMovingAvg_3/mul"
  7120. attr {
  7121. key: "T"
  7122. value {
  7123. type: DT_FLOAT
  7124. }
  7125. }
  7126. attr {
  7127. key: "_class"
  7128. value {
  7129. list {
  7130. s: "loc:@layer2/biases"
  7131. }
  7132. }
  7133. }
  7134. attr {
  7135. key: "_output_shapes"
  7136. value {
  7137. list {
  7138. shape {
  7139. dim {
  7140. size: 10
  7141. }
  7142. }
  7143. }
  7144. }
  7145. }
  7146. attr {
  7147. key: "use_locking"
  7148. value {
  7149. b: false
  7150. }
  7151. }
  7152. }
  7153. node {
  7154. name: "ExponentialMovingAverage"
  7155. op: "NoOp"
  7156. input: "^ExponentialMovingAverage/AssignMovingAvg"
  7157. input: "^ExponentialMovingAverage/AssignMovingAvg_1"
  7158. input: "^ExponentialMovingAverage/AssignMovingAvg_2"
  7159. input: "^ExponentialMovingAverage/AssignMovingAvg_3"
  7160. }
  7161. node {
  7162. name: "layer1_2/MatMul"
  7163. op: "MatMul"
  7164. input: "x-input"
  7165. input: "layer1/weights/ExponentialMovingAverage/read"
  7166. attr {
  7167. key: "T"
  7168. value {
  7169. type: DT_FLOAT
  7170. }
  7171. }
  7172. attr {
  7173. key: "_output_shapes"
  7174. value {
  7175. list {
  7176. shape {
  7177. dim {
  7178. size: -1
  7179. }
  7180. dim {
  7181. size: 500
  7182. }
  7183. }
  7184. }
  7185. }
  7186. }
  7187. attr {
  7188. key: "transpose_a"
  7189. value {
  7190. b: false
  7191. }
  7192. }
  7193. attr {
  7194. key: "transpose_b"
  7195. value {
  7196. b: false
  7197. }
  7198. }
  7199. }
  7200. node {
  7201. name: "layer1_2/add"
  7202. op: "Add"
  7203. input: "layer1_2/MatMul"
  7204. input: "layer1/biases/ExponentialMovingAverage/read"
  7205. attr {
  7206. key: "T"
  7207. value {
  7208. type: DT_FLOAT
  7209. }
  7210. }
  7211. attr {
  7212. key: "_output_shapes"
  7213. value {
  7214. list {
  7215. shape {
  7216. dim {
  7217. size: -1
  7218. }
  7219. dim {
  7220. size: 500
  7221. }
  7222. }
  7223. }
  7224. }
  7225. }
  7226. }
  7227. node {
  7228. name: "layer1_2/Relu"
  7229. op: "Relu"
  7230. input: "layer1_2/add"
  7231. attr {
  7232. key: "T"
  7233. value {
  7234. type: DT_FLOAT
  7235. }
  7236. }
  7237. attr {
  7238. key: "_output_shapes"
  7239. value {
  7240. list {
  7241. shape {
  7242. dim {
  7243. size: -1
  7244. }
  7245. dim {
  7246. size: 500
  7247. }
  7248. }
  7249. }
  7250. }
  7251. }
  7252. }
  7253. node {
  7254. name: "layer2_2/MatMul"
  7255. op: "MatMul"
  7256. input: "layer1_2/Relu"
  7257. input: "layer2/weights/ExponentialMovingAverage/read"
  7258. attr {
  7259. key: "T"
  7260. value {
  7261. type: DT_FLOAT
  7262. }
  7263. }
  7264. attr {
  7265. key: "_output_shapes"
  7266. value {
  7267. list {
  7268. shape {
  7269. dim {
  7270. size: -1
  7271. }
  7272. dim {
  7273. size: 10
  7274. }
  7275. }
  7276. }
  7277. }
  7278. }
  7279. attr {
  7280. key: "transpose_a"
  7281. value {
  7282. b: false
  7283. }
  7284. }
  7285. attr {
  7286. key: "transpose_b"
  7287. value {
  7288. b: false
  7289. }
  7290. }
  7291. }
  7292. node {
  7293. name: "layer2_2/add"
  7294. op: "Add"
  7295. input: "layer2_2/MatMul"
  7296. input: "layer2/biases/ExponentialMovingAverage/read"
  7297. attr {
  7298. key: "T"
  7299. value {
  7300. type: DT_FLOAT
  7301. }
  7302. }
  7303. attr {
  7304. key: "_output_shapes"
  7305. value {
  7306. list {
  7307. shape {
  7308. dim {
  7309. size: -1
  7310. }
  7311. dim {
  7312. size: 10
  7313. }
  7314. }
  7315. }
  7316. }
  7317. }
  7318. }
  7319. node {
  7320. name: "ArgMax/dimension"
  7321. op: "Const"
  7322. attr {
  7323. key: "_output_shapes"
  7324. value {
  7325. list {
  7326. shape {
  7327. }
  7328. }
  7329. }
  7330. }
  7331. attr {
  7332. key: "dtype"
  7333. value {
  7334. type: DT_INT32
  7335. }
  7336. }
  7337. attr {
  7338. key: "value"
  7339. value {
  7340. tensor {
  7341. dtype: DT_INT32
  7342. tensor_shape {
  7343. }
  7344. int_val: 1
  7345. }
  7346. }
  7347. }
  7348. }
  7349. node {
  7350. name: "ArgMax"
  7351. op: "ArgMax"
  7352. input: "layer2/add"
  7353. input: "ArgMax/dimension"
  7354. attr {
  7355. key: "T"
  7356. value {
  7357. type: DT_FLOAT
  7358. }
  7359. }
  7360. attr {
  7361. key: "Tidx"
  7362. value {
  7363. type: DT_INT32
  7364. }
  7365. }
  7366. attr {
  7367. key: "_output_shapes"
  7368. value {
  7369. list {
  7370. shape {
  7371. dim {
  7372. size: -1
  7373. }
  7374. }
  7375. }
  7376. }
  7377. }
  7378. attr {
  7379. key: "output_type"
  7380. value {
  7381. type: DT_INT64
  7382. }
  7383. }
  7384. }
  7385. node {
  7386. name: "SparseSoftmaxCrossEntropyWithLogits/Shape"
  7387. op: "Shape"
  7388. input: "ArgMax"
  7389. attr {
  7390. key: "T"
  7391. value {
  7392. type: DT_INT64
  7393. }
  7394. }
  7395. attr {
  7396. key: "_output_shapes"
  7397. value {
  7398. list {
  7399. shape {
  7400. dim {
  7401. size: 1
  7402. }
  7403. }
  7404. }
  7405. }
  7406. }
  7407. attr {
  7408. key: "out_type"
  7409. value {
  7410. type: DT_INT32
  7411. }
  7412. }
  7413. }
  7414. node {
  7415. name: "SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits"
  7416. op: "SparseSoftmaxCrossEntropyWithLogits"
  7417. input: "layer2_1/add"
  7418. input: "ArgMax"
  7419. attr {
  7420. key: "T"
  7421. value {
  7422. type: DT_FLOAT
  7423. }
  7424. }
  7425. attr {
  7426. key: "Tlabels"
  7427. value {
  7428. type: DT_INT64
  7429. }
  7430. }
  7431. attr {
  7432. key: "_output_shapes"
  7433. value {
  7434. list {
  7435. shape {
  7436. dim {
  7437. size: -1
  7438. }
  7439. }
  7440. shape {
  7441. dim {
  7442. size: -1
  7443. }
  7444. dim {
  7445. size: 10
  7446. }
  7447. }
  7448. }
  7449. }
  7450. }
  7451. }
  7452. node {
  7453. name: "Const"
  7454. op: "Const"
  7455. attr {
  7456. key: "_output_shapes"
  7457. value {
  7458. list {
  7459. shape {
  7460. dim {
  7461. size: 1
  7462. }
  7463. }
  7464. }
  7465. }
  7466. }
  7467. attr {
  7468. key: "dtype"
  7469. value {
  7470. type: DT_INT32
  7471. }
  7472. }
  7473. attr {
  7474. key: "value"
  7475. value {
  7476. tensor {
  7477. dtype: DT_INT32
  7478. tensor_shape {
  7479. dim {
  7480. size: 1
  7481. }
  7482. }
  7483. int_val: 0
  7484. }
  7485. }
  7486. }
  7487. }
  7488. node {
  7489. name: "Mean"
  7490. op: "Mean"
  7491. input: "SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits"
  7492. input: "Const"
  7493. attr {
  7494. key: "T"
  7495. value {
  7496. type: DT_FLOAT
  7497. }
  7498. }
  7499. attr {
  7500. key: "Tidx"
  7501. value {
  7502. type: DT_INT32
  7503. }
  7504. }
  7505. attr {
  7506. key: "_output_shapes"
  7507. value {
  7508. list {
  7509. shape {
  7510. }
  7511. }
  7512. }
  7513. }
  7514. attr {
  7515. key: "keep_dims"
  7516. value {
  7517. b: false
  7518. }
  7519. }
  7520. }
  7521. node {
  7522. name: "l2_regularizer/scale"
  7523. op: "Const"
  7524. attr {
  7525. key: "_output_shapes"
  7526. value {
  7527. list {
  7528. shape {
  7529. }
  7530. }
  7531. }
  7532. }
  7533. attr {
  7534. key: "dtype"
  7535. value {
  7536. type: DT_FLOAT
  7537. }
  7538. }
  7539. attr {
  7540. key: "value"
  7541. value {
  7542. tensor {
  7543. dtype: DT_FLOAT
  7544. tensor_shape {
  7545. }
  7546. float_val: 9.99999974738e-05
  7547. }
  7548. }
  7549. }
  7550. }
  7551. node {
  7552. name: "l2_regularizer/L2Loss"
  7553. op: "L2Loss"
  7554. input: "layer1/weights/read"
  7555. attr {
  7556. key: "T"
  7557. value {
  7558. type: DT_FLOAT
  7559. }
  7560. }
  7561. attr {
  7562. key: "_output_shapes"
  7563. value {
  7564. list {
  7565. shape {
  7566. }
  7567. }
  7568. }
  7569. }
  7570. }
  7571. node {
  7572. name: "l2_regularizer"
  7573. op: "Mul"
  7574. input: "l2_regularizer/scale"
  7575. input: "l2_regularizer/L2Loss"
  7576. attr {
  7577. key: "T"
  7578. value {
  7579. type: DT_FLOAT
  7580. }
  7581. }
  7582. attr {
  7583. key: "_output_shapes"
  7584. value {
  7585. list {
  7586. shape {
  7587. }
  7588. }
  7589. }
  7590. }
  7591. }
  7592. node {
  7593. name: "l2_regularizer_1/scale"
  7594. op: "Const"
  7595. attr {
  7596. key: "_output_shapes"
  7597. value {
  7598. list {
  7599. shape {
  7600. }
  7601. }
  7602. }
  7603. }
  7604. attr {
  7605. key: "dtype"
  7606. value {
  7607. type: DT_FLOAT
  7608. }
  7609. }
  7610. attr {
  7611. key: "value"
  7612. value {
  7613. tensor {
  7614. dtype: DT_FLOAT
  7615. tensor_shape {
  7616. }
  7617. float_val: 9.99999974738e-05
  7618. }
  7619. }
  7620. }
  7621. }
  7622. node {
  7623. name: "l2_regularizer_1/L2Loss"
  7624. op: "L2Loss"
  7625. input: "layer2/weights/read"
  7626. attr {
  7627. key: "T"
  7628. value {
  7629. type: DT_FLOAT
  7630. }
  7631. }
  7632. attr {
  7633. key: "_output_shapes"
  7634. value {
  7635. list {
  7636. shape {
  7637. }
  7638. }
  7639. }
  7640. }
  7641. }
  7642. node {
  7643. name: "l2_regularizer_1"
  7644. op: "Mul"
  7645. input: "l2_regularizer_1/scale"
  7646. input: "l2_regularizer_1/L2Loss"
  7647. attr {
  7648. key: "T"
  7649. value {
  7650. type: DT_FLOAT
  7651. }
  7652. }
  7653. attr {
  7654. key: "_output_shapes"
  7655. value {
  7656. list {
  7657. shape {
  7658. }
  7659. }
  7660. }
  7661. }
  7662. }
  7663. node {
  7664. name: "add"
  7665. op: "Add"
  7666. input: "l2_regularizer"
  7667. input: "l2_regularizer_1"
  7668. attr {
  7669. key: "T"
  7670. value {
  7671. type: DT_FLOAT
  7672. }
  7673. }
  7674. attr {
  7675. key: "_output_shapes"
  7676. value {
  7677. list {
  7678. shape {
  7679. }
  7680. }
  7681. }
  7682. }
  7683. }
  7684. node {
  7685. name: "add_1"
  7686. op: "Add"
  7687. input: "Mean"
  7688. input: "add"
  7689. attr {
  7690. key: "T"
  7691. value {
  7692. type: DT_FLOAT
  7693. }
  7694. }
  7695. attr {
  7696. key: "_output_shapes"
  7697. value {
  7698. list {
  7699. shape {
  7700. }
  7701. }
  7702. }
  7703. }
  7704. }
  7705. node {
  7706. name: "ExponentialDecay/learning_rate"
  7707. op: "Const"
  7708. attr {
  7709. key: "_output_shapes"
  7710. value {
  7711. list {
  7712. shape {
  7713. }
  7714. }
  7715. }
  7716. }
  7717. attr {
  7718. key: "dtype"
  7719. value {
  7720. type: DT_FLOAT
  7721. }
  7722. }
  7723. attr {
  7724. key: "value"
  7725. value {
  7726. tensor {
  7727. dtype: DT_FLOAT
  7728. tensor_shape {
  7729. }
  7730. float_val: 0.800000011921
  7731. }
  7732. }
  7733. }
  7734. }
  7735. node {
  7736. name: "ExponentialDecay/Cast"
  7737. op: "Cast"
  7738. input: "Variable/read"
  7739. attr {
  7740. key: "DstT"
  7741. value {
  7742. type: DT_FLOAT
  7743. }
  7744. }
  7745. attr {
  7746. key: "SrcT"
  7747. value {
  7748. type: DT_INT32
  7749. }
  7750. }
  7751. attr {
  7752. key: "_output_shapes"
  7753. value {
  7754. list {
  7755. shape {
  7756. }
  7757. }
  7758. }
  7759. }
  7760. }
  7761. node {
  7762. name: "ExponentialDecay/Cast_1/x"
  7763. op: "Const"
  7764. attr {
  7765. key: "_output_shapes"
  7766. value {
  7767. list {
  7768. shape {
  7769. }
  7770. }
  7771. }
  7772. }
  7773. attr {
  7774. key: "dtype"
  7775. value {
  7776. type: DT_INT32
  7777. }
  7778. }
  7779. attr {
  7780. key: "value"
  7781. value {
  7782. tensor {
  7783. dtype: DT_INT32
  7784. tensor_shape {
  7785. }
  7786. int_val: 55
  7787. }
  7788. }
  7789. }
  7790. }
  7791. node {
  7792. name: "ExponentialDecay/Cast_1"
  7793. op: "Cast"
  7794. input: "ExponentialDecay/Cast_1/x"
  7795. attr {
  7796. key: "DstT"
  7797. value {
  7798. type: DT_FLOAT
  7799. }
  7800. }
  7801. attr {
  7802. key: "SrcT"
  7803. value {
  7804. type: DT_INT32
  7805. }
  7806. }
  7807. attr {
  7808. key: "_output_shapes"
  7809. value {
  7810. list {
  7811. shape {
  7812. }
  7813. }
  7814. }
  7815. }
  7816. }
  7817. node {
  7818. name: "ExponentialDecay/Cast_2/x"
  7819. op: "Const"
  7820. attr {
  7821. key: "_output_shapes"
  7822. value {
  7823. list {
  7824. shape {
  7825. }
  7826. }
  7827. }
  7828. }
  7829. attr {
  7830. key: "dtype"
  7831. value {
  7832. type: DT_FLOAT
  7833. }
  7834. }
  7835. attr {
  7836. key: "value"
  7837. value {
  7838. tensor {
  7839. dtype: DT_FLOAT
  7840. tensor_shape {
  7841. }
  7842. float_val: 0.999000012875
  7843. }
  7844. }
  7845. }
  7846. }
  7847. node {
  7848. name: "ExponentialDecay/truediv"
  7849. op: "RealDiv"
  7850. input: "ExponentialDecay/Cast"
  7851. input: "ExponentialDecay/Cast_1"
  7852. attr {
  7853. key: "T"
  7854. value {
  7855. type: DT_FLOAT
  7856. }
  7857. }
  7858. attr {
  7859. key: "_output_shapes"
  7860. value {
  7861. list {
  7862. shape {
  7863. }
  7864. }
  7865. }
  7866. }
  7867. }
  7868. node {
  7869. name: "ExponentialDecay/Pow"
  7870. op: "Pow"
  7871. input: "ExponentialDecay/Cast_2/x"
  7872. input: "ExponentialDecay/truediv"
  7873. attr {
  7874. key: "T"
  7875. value {
  7876. type: DT_FLOAT
  7877. }
  7878. }
  7879. attr {
  7880. key: "_output_shapes"
  7881. value {
  7882. list {
  7883. shape {
  7884. }
  7885. }
  7886. }
  7887. }
  7888. }
  7889. node {
  7890. name: "ExponentialDecay"
  7891. op: "Mul"
  7892. input: "ExponentialDecay/learning_rate"
  7893. input: "ExponentialDecay/Pow"
  7894. attr {
  7895. key: "T"
  7896. value {
  7897. type: DT_FLOAT
  7898. }
  7899. }
  7900. attr {
  7901. key: "_output_shapes"
  7902. value {
  7903. list {
  7904. shape {
  7905. }
  7906. }
  7907. }
  7908. }
  7909. }
  7910. node {
  7911. name: "gradients/Shape"
  7912. op: "Const"
  7913. attr {
  7914. key: "_output_shapes"
  7915. value {
  7916. list {
  7917. shape {
  7918. dim {
  7919. }
  7920. }
  7921. }
  7922. }
  7923. }
  7924. attr {
  7925. key: "dtype"
  7926. value {
  7927. type: DT_INT32
  7928. }
  7929. }
  7930. attr {
  7931. key: "value"
  7932. value {
  7933. tensor {
  7934. dtype: DT_INT32
  7935. tensor_shape {
  7936. dim {
  7937. }
  7938. }
  7939. }
  7940. }
  7941. }
  7942. }
  7943. node {
  7944. name: "gradients/grad_ys_0"
  7945. op: "Const"
  7946. attr {
  7947. key: "_output_shapes"
  7948. value {
  7949. list {
  7950. shape {
  7951. }
  7952. }
  7953. }
  7954. }
  7955. attr {
  7956. key: "dtype"
  7957. value {
  7958. type: DT_FLOAT
  7959. }
  7960. }
  7961. attr {
  7962. key: "value"
  7963. value {
  7964. tensor {
  7965. dtype: DT_FLOAT
  7966. tensor_shape {
  7967. }
  7968. float_val: 1.0
  7969. }
  7970. }
  7971. }
  7972. }
  7973. node {
  7974. name: "gradients/Fill"
  7975. op: "Fill"
  7976. input: "gradients/Shape"
  7977. input: "gradients/grad_ys_0"
  7978. attr {
  7979. key: "T"
  7980. value {
  7981. type: DT_FLOAT
  7982. }
  7983. }
  7984. attr {
  7985. key: "_output_shapes"
  7986. value {
  7987. list {
  7988. shape {
  7989. }
  7990. }
  7991. }
  7992. }
  7993. attr {
  7994. key: "index_type"
  7995. value {
  7996. type: DT_INT32
  7997. }
  7998. }
  7999. }
  8000. node {
  8001. name: "gradients/add_1_grad/tuple/group_deps"
  8002. op: "NoOp"
  8003. input: "^gradients/Fill"
  8004. }
  8005. node {
  8006. name: "gradients/add_1_grad/tuple/control_dependency"
  8007. op: "Identity"
  8008. input: "gradients/Fill"
  8009. input: "^gradients/add_1_grad/tuple/group_deps"
  8010. attr {
  8011. key: "T"
  8012. value {
  8013. type: DT_FLOAT
  8014. }
  8015. }
  8016. attr {
  8017. key: "_class"
  8018. value {
  8019. list {
  8020. s: "loc:@gradients/Fill"
  8021. }
  8022. }
  8023. }
  8024. attr {
  8025. key: "_output_shapes"
  8026. value {
  8027. list {
  8028. shape {
  8029. }
  8030. }
  8031. }
  8032. }
  8033. }
  8034. node {
  8035. name: "gradients/add_1_grad/tuple/control_dependency_1"
  8036. op: "Identity"
  8037. input: "gradients/Fill"
  8038. input: "^gradients/add_1_grad/tuple/group_deps"
  8039. attr {
  8040. key: "T"
  8041. value {
  8042. type: DT_FLOAT
  8043. }
  8044. }
  8045. attr {
  8046. key: "_class"
  8047. value {
  8048. list {
  8049. s: "loc:@gradients/Fill"
  8050. }
  8051. }
  8052. }
  8053. attr {
  8054. key: "_output_shapes"
  8055. value {
  8056. list {
  8057. shape {
  8058. }
  8059. }
  8060. }
  8061. }
  8062. }
  8063. node {
  8064. name: "gradients/Mean_grad/Reshape/shape"
  8065. op: "Const"
  8066. attr {
  8067. key: "_output_shapes"
  8068. value {
  8069. list {
  8070. shape {
  8071. dim {
  8072. size: 1
  8073. }
  8074. }
  8075. }
  8076. }
  8077. }
  8078. attr {
  8079. key: "dtype"
  8080. value {
  8081. type: DT_INT32
  8082. }
  8083. }
  8084. attr {
  8085. key: "value"
  8086. value {
  8087. tensor {
  8088. dtype: DT_INT32
  8089. tensor_shape {
  8090. dim {
  8091. size: 1
  8092. }
  8093. }
  8094. int_val: 1
  8095. }
  8096. }
  8097. }
  8098. }
  8099. node {
  8100. name: "gradients/Mean_grad/Reshape"
  8101. op: "Reshape"
  8102. input: "gradients/add_1_grad/tuple/control_dependency"
  8103. input: "gradients/Mean_grad/Reshape/shape"
  8104. attr {
  8105. key: "T"
  8106. value {
  8107. type: DT_FLOAT
  8108. }
  8109. }
  8110. attr {
  8111. key: "Tshape"
  8112. value {
  8113. type: DT_INT32
  8114. }
  8115. }
  8116. attr {
  8117. key: "_output_shapes"
  8118. value {
  8119. list {
  8120. shape {
  8121. dim {
  8122. size: 1
  8123. }
  8124. }
  8125. }
  8126. }
  8127. }
  8128. }
  8129. node {
  8130. name: "gradients/Mean_grad/Shape"
  8131. op: "Shape"
  8132. input: "SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits"
  8133. attr {
  8134. key: "T"
  8135. value {
  8136. type: DT_FLOAT
  8137. }
  8138. }
  8139. attr {
  8140. key: "_output_shapes"
  8141. value {
  8142. list {
  8143. shape {
  8144. dim {
  8145. size: 1
  8146. }
  8147. }
  8148. }
  8149. }
  8150. }
  8151. attr {
  8152. key: "out_type"
  8153. value {
  8154. type: DT_INT32
  8155. }
  8156. }
  8157. }
  8158. node {
  8159. name: "gradients/Mean_grad/Tile"
  8160. op: "Tile"
  8161. input: "gradients/Mean_grad/Reshape"
  8162. input: "gradients/Mean_grad/Shape"
  8163. attr {
  8164. key: "T"
  8165. value {
  8166. type: DT_FLOAT
  8167. }
  8168. }
  8169. attr {
  8170. key: "Tmultiples"
  8171. value {
  8172. type: DT_INT32
  8173. }
  8174. }
  8175. attr {
  8176. key: "_output_shapes"
  8177. value {
  8178. list {
  8179. shape {
  8180. dim {
  8181. size: -1
  8182. }
  8183. }
  8184. }
  8185. }
  8186. }
  8187. }
  8188. node {
  8189. name: "gradients/Mean_grad/Shape_1"
  8190. op: "Shape"
  8191. input: "SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits"
  8192. attr {
  8193. key: "T"
  8194. value {
  8195. type: DT_FLOAT
  8196. }
  8197. }
  8198. attr {
  8199. key: "_output_shapes"
  8200. value {
  8201. list {
  8202. shape {
  8203. dim {
  8204. size: 1
  8205. }
  8206. }
  8207. }
  8208. }
  8209. }
  8210. attr {
  8211. key: "out_type"
  8212. value {
  8213. type: DT_INT32
  8214. }
  8215. }
  8216. }
  8217. node {
  8218. name: "gradients/Mean_grad/Shape_2"
  8219. op: "Const"
  8220. attr {
  8221. key: "_output_shapes"
  8222. value {
  8223. list {
  8224. shape {
  8225. dim {
  8226. }
  8227. }
  8228. }
  8229. }
  8230. }
  8231. attr {
  8232. key: "dtype"
  8233. value {
  8234. type: DT_INT32
  8235. }
  8236. }
  8237. attr {
  8238. key: "value"
  8239. value {
  8240. tensor {
  8241. dtype: DT_INT32
  8242. tensor_shape {
  8243. dim {
  8244. }
  8245. }
  8246. }
  8247. }
  8248. }
  8249. }
  8250. node {
  8251. name: "gradients/Mean_grad/Const"
  8252. op: "Const"
  8253. attr {
  8254. key: "_output_shapes"
  8255. value {
  8256. list {
  8257. shape {
  8258. dim {
  8259. size: 1
  8260. }
  8261. }
  8262. }
  8263. }
  8264. }
  8265. attr {
  8266. key: "dtype"
  8267. value {
  8268. type: DT_INT32
  8269. }
  8270. }
  8271. attr {
  8272. key: "value"
  8273. value {
  8274. tensor {
  8275. dtype: DT_INT32
  8276. tensor_shape {
  8277. dim {
  8278. size: 1
  8279. }
  8280. }
  8281. int_val: 0
  8282. }
  8283. }
  8284. }
  8285. }
  8286. node {
  8287. name: "gradients/Mean_grad/Prod"
  8288. op: "Prod"
  8289. input: "gradients/Mean_grad/Shape_1"
  8290. input: "gradients/Mean_grad/Const"
  8291. attr {
  8292. key: "T"
  8293. value {
  8294. type: DT_INT32
  8295. }
  8296. }
  8297. attr {
  8298. key: "Tidx"
  8299. value {
  8300. type: DT_INT32
  8301. }
  8302. }
  8303. attr {
  8304. key: "_output_shapes"
  8305. value {
  8306. list {
  8307. shape {
  8308. }
  8309. }
  8310. }
  8311. }
  8312. attr {
  8313. key: "keep_dims"
  8314. value {
  8315. b: false
  8316. }
  8317. }
  8318. }
  8319. node {
  8320. name: "gradients/Mean_grad/Const_1"
  8321. op: "Const"
  8322. attr {
  8323. key: "_output_shapes"
  8324. value {
  8325. list {
  8326. shape {
  8327. dim {
  8328. size: 1
  8329. }
  8330. }
  8331. }
  8332. }
  8333. }
  8334. attr {
  8335. key: "dtype"
  8336. value {
  8337. type: DT_INT32
  8338. }
  8339. }
  8340. attr {
  8341. key: "value"
  8342. value {
  8343. tensor {
  8344. dtype: DT_INT32
  8345. tensor_shape {
  8346. dim {
  8347. size: 1
  8348. }
  8349. }
  8350. int_val: 0
  8351. }
  8352. }
  8353. }
  8354. }
  8355. node {
  8356. name: "gradients/Mean_grad/Prod_1"
  8357. op: "Prod"
  8358. input: "gradients/Mean_grad/Shape_2"
  8359. input: "gradients/Mean_grad/Const_1"
  8360. attr {
  8361. key: "T"
  8362. value {
  8363. type: DT_INT32
  8364. }
  8365. }
  8366. attr {
  8367. key: "Tidx"
  8368. value {
  8369. type: DT_INT32
  8370. }
  8371. }
  8372. attr {
  8373. key: "_output_shapes"
  8374. value {
  8375. list {
  8376. shape {
  8377. }
  8378. }
  8379. }
  8380. }
  8381. attr {
  8382. key: "keep_dims"
  8383. value {
  8384. b: false
  8385. }
  8386. }
  8387. }
  8388. node {
  8389. name: "gradients/Mean_grad/Maximum/y"
  8390. op: "Const"
  8391. attr {
  8392. key: "_output_shapes"
  8393. value {
  8394. list {
  8395. shape {
  8396. }
  8397. }
  8398. }
  8399. }
  8400. attr {
  8401. key: "dtype"
  8402. value {
  8403. type: DT_INT32
  8404. }
  8405. }
  8406. attr {
  8407. key: "value"
  8408. value {
  8409. tensor {
  8410. dtype: DT_INT32
  8411. tensor_shape {
  8412. }
  8413. int_val: 1
  8414. }
  8415. }
  8416. }
  8417. }
  8418. node {
  8419. name: "gradients/Mean_grad/Maximum"
  8420. op: "Maximum"
  8421. input: "gradients/Mean_grad/Prod_1"
  8422. input: "gradients/Mean_grad/Maximum/y"
  8423. attr {
  8424. key: "T"
  8425. value {
  8426. type: DT_INT32
  8427. }
  8428. }
  8429. attr {
  8430. key: "_output_shapes"
  8431. value {
  8432. list {
  8433. shape {
  8434. }
  8435. }
  8436. }
  8437. }
  8438. }
  8439. node {
  8440. name: "gradients/Mean_grad/floordiv"
  8441. op: "FloorDiv"
  8442. input: "gradients/Mean_grad/Prod"
  8443. input: "gradients/Mean_grad/Maximum"
  8444. attr {
  8445. key: "T"
  8446. value {
  8447. type: DT_INT32
  8448. }
  8449. }
  8450. attr {
  8451. key: "_output_shapes"
  8452. value {
  8453. list {
  8454. shape {
  8455. }
  8456. }
  8457. }
  8458. }
  8459. }
  8460. node {
  8461. name: "gradients/Mean_grad/Cast"
  8462. op: "Cast"
  8463. input: "gradients/Mean_grad/floordiv"
  8464. attr {
  8465. key: "DstT"
  8466. value {
  8467. type: DT_FLOAT
  8468. }
  8469. }
  8470. attr {
  8471. key: "SrcT"
  8472. value {
  8473. type: DT_INT32
  8474. }
  8475. }
  8476. attr {
  8477. key: "_output_shapes"
  8478. value {
  8479. list {
  8480. shape {
  8481. }
  8482. }
  8483. }
  8484. }
  8485. }
  8486. node {
  8487. name: "gradients/Mean_grad/truediv"
  8488. op: "RealDiv"
  8489. input: "gradients/Mean_grad/Tile"
  8490. input: "gradients/Mean_grad/Cast"
  8491. attr {
  8492. key: "T"
  8493. value {
  8494. type: DT_FLOAT
  8495. }
  8496. }
  8497. attr {
  8498. key: "_output_shapes"
  8499. value {
  8500. list {
  8501. shape {
  8502. dim {
  8503. size: -1
  8504. }
  8505. }
  8506. }
  8507. }
  8508. }
  8509. }
  8510. node {
  8511. name: "gradients/add_grad/tuple/group_deps"
  8512. op: "NoOp"
  8513. input: "^gradients/add_1_grad/tuple/control_dependency_1"
  8514. }
  8515. node {
  8516. name: "gradients/add_grad/tuple/control_dependency"
  8517. op: "Identity"
  8518. input: "gradients/add_1_grad/tuple/control_dependency_1"
  8519. input: "^gradients/add_grad/tuple/group_deps"
  8520. attr {
  8521. key: "T"
  8522. value {
  8523. type: DT_FLOAT
  8524. }
  8525. }
  8526. attr {
  8527. key: "_class"
  8528. value {
  8529. list {
  8530. s: "loc:@gradients/Fill"
  8531. }
  8532. }
  8533. }
  8534. attr {
  8535. key: "_output_shapes"
  8536. value {
  8537. list {
  8538. shape {
  8539. }
  8540. }
  8541. }
  8542. }
  8543. }
  8544. node {
  8545. name: "gradients/add_grad/tuple/control_dependency_1"
  8546. op: "Identity"
  8547. input: "gradients/add_1_grad/tuple/control_dependency_1"
  8548. input: "^gradients/add_grad/tuple/group_deps"
  8549. attr {
  8550. key: "T"
  8551. value {
  8552. type: DT_FLOAT
  8553. }
  8554. }
  8555. attr {
  8556. key: "_class"
  8557. value {
  8558. list {
  8559. s: "loc:@gradients/Fill"
  8560. }
  8561. }
  8562. }
  8563. attr {
  8564. key: "_output_shapes"
  8565. value {
  8566. list {
  8567. shape {
  8568. }
  8569. }
  8570. }
  8571. }
  8572. }
  8573. node {
  8574. name: "gradients/zeros_like"
  8575. op: "ZerosLike"
  8576. input: "SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits:1"
  8577. attr {
  8578. key: "T"
  8579. value {
  8580. type: DT_FLOAT
  8581. }
  8582. }
  8583. attr {
  8584. key: "_output_shapes"
  8585. value {
  8586. list {
  8587. shape {
  8588. dim {
  8589. size: -1
  8590. }
  8591. dim {
  8592. size: 10
  8593. }
  8594. }
  8595. }
  8596. }
  8597. }
  8598. }
  8599. node {
  8600. name: "gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/PreventGradient"
  8601. op: "PreventGradient"
  8602. input: "SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits:1"
  8603. attr {
  8604. key: "T"
  8605. value {
  8606. type: DT_FLOAT
  8607. }
  8608. }
  8609. attr {
  8610. key: "_output_shapes"
  8611. value {
  8612. list {
  8613. shape {
  8614. dim {
  8615. size: -1
  8616. }
  8617. dim {
  8618. size: 10
  8619. }
  8620. }
  8621. }
  8622. }
  8623. }
  8624. attr {
  8625. key: "message"
  8626. value {
  8627. s: "Currently there is no way to take the second derivative of sparse_softmax_cross_entropy_with_logits due to the fused implementation\'s interaction with tf.gradients()"
  8628. }
  8629. }
  8630. }
  8631. node {
  8632. name: "gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/ExpandDims/dim"
  8633. op: "Const"
  8634. attr {
  8635. key: "_output_shapes"
  8636. value {
  8637. list {
  8638. shape {
  8639. }
  8640. }
  8641. }
  8642. }
  8643. attr {
  8644. key: "dtype"
  8645. value {
  8646. type: DT_INT32
  8647. }
  8648. }
  8649. attr {
  8650. key: "value"
  8651. value {
  8652. tensor {
  8653. dtype: DT_INT32
  8654. tensor_shape {
  8655. }
  8656. int_val: -1
  8657. }
  8658. }
  8659. }
  8660. }
  8661. node {
  8662. name: "gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/ExpandDims"
  8663. op: "ExpandDims"
  8664. input: "gradients/Mean_grad/truediv"
  8665. input: "gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/ExpandDims/dim"
  8666. attr {
  8667. key: "T"
  8668. value {
  8669. type: DT_FLOAT
  8670. }
  8671. }
  8672. attr {
  8673. key: "Tdim"
  8674. value {
  8675. type: DT_INT32
  8676. }
  8677. }
  8678. attr {
  8679. key: "_output_shapes"
  8680. value {
  8681. list {
  8682. shape {
  8683. dim {
  8684. size: -1
  8685. }
  8686. dim {
  8687. size: 1
  8688. }
  8689. }
  8690. }
  8691. }
  8692. }
  8693. }
  8694. node {
  8695. name: "gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/mul"
  8696. op: "Mul"
  8697. input: "gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/ExpandDims"
  8698. input: "gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/PreventGradient"
  8699. attr {
  8700. key: "T"
  8701. value {
  8702. type: DT_FLOAT
  8703. }
  8704. }
  8705. attr {
  8706. key: "_output_shapes"
  8707. value {
  8708. list {
  8709. shape {
  8710. dim {
  8711. size: -1
  8712. }
  8713. dim {
  8714. size: 10
  8715. }
  8716. }
  8717. }
  8718. }
  8719. }
  8720. }
  8721. node {
  8722. name: "gradients/l2_regularizer_grad/Mul"
  8723. op: "Mul"
  8724. input: "gradients/add_grad/tuple/control_dependency"
  8725. input: "l2_regularizer/L2Loss"
  8726. attr {
  8727. key: "T"
  8728. value {
  8729. type: DT_FLOAT
  8730. }
  8731. }
  8732. attr {
  8733. key: "_output_shapes"
  8734. value {
  8735. list {
  8736. shape {
  8737. }
  8738. }
  8739. }
  8740. }
  8741. }
  8742. node {
  8743. name: "gradients/l2_regularizer_grad/Mul_1"
  8744. op: "Mul"
  8745. input: "gradients/add_grad/tuple/control_dependency"
  8746. input: "l2_regularizer/scale"
  8747. attr {
  8748. key: "T"
  8749. value {
  8750. type: DT_FLOAT
  8751. }
  8752. }
  8753. attr {
  8754. key: "_output_shapes"
  8755. value {
  8756. list {
  8757. shape {
  8758. }
  8759. }
  8760. }
  8761. }
  8762. }
  8763. node {
  8764. name: "gradients/l2_regularizer_grad/tuple/group_deps"
  8765. op: "NoOp"
  8766. input: "^gradients/l2_regularizer_grad/Mul"
  8767. input: "^gradients/l2_regularizer_grad/Mul_1"
  8768. }
  8769. node {
  8770. name: "gradients/l2_regularizer_grad/tuple/control_dependency"
  8771. op: "Identity"
  8772. input: "gradients/l2_regularizer_grad/Mul"
  8773. input: "^gradients/l2_regularizer_grad/tuple/group_deps"
  8774. attr {
  8775. key: "T"
  8776. value {
  8777. type: DT_FLOAT
  8778. }
  8779. }
  8780. attr {
  8781. key: "_class"
  8782. value {
  8783. list {
  8784. s: "loc:@gradients/l2_regularizer_grad/Mul"
  8785. }
  8786. }
  8787. }
  8788. attr {
  8789. key: "_output_shapes"
  8790. value {
  8791. list {
  8792. shape {
  8793. }
  8794. }
  8795. }
  8796. }
  8797. }
  8798. node {
  8799. name: "gradients/l2_regularizer_grad/tuple/control_dependency_1"
  8800. op: "Identity"
  8801. input: "gradients/l2_regularizer_grad/Mul_1"
  8802. input: "^gradients/l2_regularizer_grad/tuple/group_deps"
  8803. attr {
  8804. key: "T"
  8805. value {
  8806. type: DT_FLOAT
  8807. }
  8808. }
  8809. attr {
  8810. key: "_class"
  8811. value {
  8812. list {
  8813. s: "loc:@gradients/l2_regularizer_grad/Mul_1"
  8814. }
  8815. }
  8816. }
  8817. attr {
  8818. key: "_output_shapes"
  8819. value {
  8820. list {
  8821. shape {
  8822. }
  8823. }
  8824. }
  8825. }
  8826. }
  8827. node {
  8828. name: "gradients/l2_regularizer_1_grad/Mul"
  8829. op: "Mul"
  8830. input: "gradients/add_grad/tuple/control_dependency_1"
  8831. input: "l2_regularizer_1/L2Loss"
  8832. attr {
  8833. key: "T"
  8834. value {
  8835. type: DT_FLOAT
  8836. }
  8837. }
  8838. attr {
  8839. key: "_output_shapes"
  8840. value {
  8841. list {
  8842. shape {
  8843. }
  8844. }
  8845. }
  8846. }
  8847. }
  8848. node {
  8849. name: "gradients/l2_regularizer_1_grad/Mul_1"
  8850. op: "Mul"
  8851. input: "gradients/add_grad/tuple/control_dependency_1"
  8852. input: "l2_regularizer_1/scale"
  8853. attr {
  8854. key: "T"
  8855. value {
  8856. type: DT_FLOAT
  8857. }
  8858. }
  8859. attr {
  8860. key: "_output_shapes"
  8861. value {
  8862. list {
  8863. shape {
  8864. }
  8865. }
  8866. }
  8867. }
  8868. }
  8869. node {
  8870. name: "gradients/l2_regularizer_1_grad/tuple/group_deps"
  8871. op: "NoOp"
  8872. input: "^gradients/l2_regularizer_1_grad/Mul"
  8873. input: "^gradients/l2_regularizer_1_grad/Mul_1"
  8874. }
  8875. node {
  8876. name: "gradients/l2_regularizer_1_grad/tuple/control_dependency"
  8877. op: "Identity"
  8878. input: "gradients/l2_regularizer_1_grad/Mul"
  8879. input: "^gradients/l2_regularizer_1_grad/tuple/group_deps"
  8880. attr {
  8881. key: "T"
  8882. value {
  8883. type: DT_FLOAT
  8884. }
  8885. }
  8886. attr {
  8887. key: "_class"
  8888. value {
  8889. list {
  8890. s: "loc:@gradients/l2_regularizer_1_grad/Mul"
  8891. }
  8892. }
  8893. }
  8894. attr {
  8895. key: "_output_shapes"
  8896. value {
  8897. list {
  8898. shape {
  8899. }
  8900. }
  8901. }
  8902. }
  8903. }
  8904. node {
  8905. name: "gradients/l2_regularizer_1_grad/tuple/control_dependency_1"
  8906. op: "Identity"
  8907. input: "gradients/l2_regularizer_1_grad/Mul_1"
  8908. input: "^gradients/l2_regularizer_1_grad/tuple/group_deps"
  8909. attr {
  8910. key: "T"
  8911. value {
  8912. type: DT_FLOAT
  8913. }
  8914. }
  8915. attr {
  8916. key: "_class"
  8917. value {
  8918. list {
  8919. s: "loc:@gradients/l2_regularizer_1_grad/Mul_1"
  8920. }
  8921. }
  8922. }
  8923. attr {
  8924. key: "_output_shapes"
  8925. value {
  8926. list {
  8927. shape {
  8928. }
  8929. }
  8930. }
  8931. }
  8932. }
  8933. node {
  8934. name: "gradients/layer2_1/add_grad/Shape"
  8935. op: "Shape"
  8936. input: "layer2_1/MatMul"
  8937. attr {
  8938. key: "T"
  8939. value {
  8940. type: DT_FLOAT
  8941. }
  8942. }
  8943. attr {
  8944. key: "_output_shapes"
  8945. value {
  8946. list {
  8947. shape {
  8948. dim {
  8949. size: 2
  8950. }
  8951. }
  8952. }
  8953. }
  8954. }
  8955. attr {
  8956. key: "out_type"
  8957. value {
  8958. type: DT_INT32
  8959. }
  8960. }
  8961. }
  8962. node {
  8963. name: "gradients/layer2_1/add_grad/Shape_1"
  8964. op: "Const"
  8965. attr {
  8966. key: "_output_shapes"
  8967. value {
  8968. list {
  8969. shape {
  8970. dim {
  8971. size: 1
  8972. }
  8973. }
  8974. }
  8975. }
  8976. }
  8977. attr {
  8978. key: "dtype"
  8979. value {
  8980. type: DT_INT32
  8981. }
  8982. }
  8983. attr {
  8984. key: "value"
  8985. value {
  8986. tensor {
  8987. dtype: DT_INT32
  8988. tensor_shape {
  8989. dim {
  8990. size: 1
  8991. }
  8992. }
  8993. int_val: 10
  8994. }
  8995. }
  8996. }
  8997. }
  8998. node {
  8999. name: "gradients/layer2_1/add_grad/BroadcastGradientArgs"
  9000. op: "BroadcastGradientArgs"
  9001. input: "gradients/layer2_1/add_grad/Shape"
  9002. input: "gradients/layer2_1/add_grad/Shape_1"
  9003. attr {
  9004. key: "T"
  9005. value {
  9006. type: DT_INT32
  9007. }
  9008. }
  9009. attr {
  9010. key: "_output_shapes"
  9011. value {
  9012. list {
  9013. shape {
  9014. dim {
  9015. size: -1
  9016. }
  9017. }
  9018. shape {
  9019. dim {
  9020. size: -1
  9021. }
  9022. }
  9023. }
  9024. }
  9025. }
  9026. }
  9027. node {
  9028. name: "gradients/layer2_1/add_grad/Sum"
  9029. op: "Sum"
  9030. input: "gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/mul"
  9031. input: "gradients/layer2_1/add_grad/BroadcastGradientArgs"
  9032. attr {
  9033. key: "T"
  9034. value {
  9035. type: DT_FLOAT
  9036. }
  9037. }
  9038. attr {
  9039. key: "Tidx"
  9040. value {
  9041. type: DT_INT32
  9042. }
  9043. }
  9044. attr {
  9045. key: "_output_shapes"
  9046. value {
  9047. list {
  9048. shape {
  9049. unknown_rank: true
  9050. }
  9051. }
  9052. }
  9053. }
  9054. attr {
  9055. key: "keep_dims"
  9056. value {
  9057. b: false
  9058. }
  9059. }
  9060. }
  9061. node {
  9062. name: "gradients/layer2_1/add_grad/Reshape"
  9063. op: "Reshape"
  9064. input: "gradients/layer2_1/add_grad/Sum"
  9065. input: "gradients/layer2_1/add_grad/Shape"
  9066. attr {
  9067. key: "T"
  9068. value {
  9069. type: DT_FLOAT
  9070. }
  9071. }
  9072. attr {
  9073. key: "Tshape"
  9074. value {
  9075. type: DT_INT32
  9076. }
  9077. }
  9078. attr {
  9079. key: "_output_shapes"
  9080. value {
  9081. list {
  9082. shape {
  9083. dim {
  9084. size: -1
  9085. }
  9086. dim {
  9087. size: 10
  9088. }
  9089. }
  9090. }
  9091. }
  9092. }
  9093. }
  9094. node {
  9095. name: "gradients/layer2_1/add_grad/Sum_1"
  9096. op: "Sum"
  9097. input: "gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/mul"
  9098. input: "gradients/layer2_1/add_grad/BroadcastGradientArgs:1"
  9099. attr {
  9100. key: "T"
  9101. value {
  9102. type: DT_FLOAT
  9103. }
  9104. }
  9105. attr {
  9106. key: "Tidx"
  9107. value {
  9108. type: DT_INT32
  9109. }
  9110. }
  9111. attr {
  9112. key: "_output_shapes"
  9113. value {
  9114. list {
  9115. shape {
  9116. unknown_rank: true
  9117. }
  9118. }
  9119. }
  9120. }
  9121. attr {
  9122. key: "keep_dims"
  9123. value {
  9124. b: false
  9125. }
  9126. }
  9127. }
  9128. node {
  9129. name: "gradients/layer2_1/add_grad/Reshape_1"
  9130. op: "Reshape"
  9131. input: "gradients/layer2_1/add_grad/Sum_1"
  9132. input: "gradients/layer2_1/add_grad/Shape_1"
  9133. attr {
  9134. key: "T"
  9135. value {
  9136. type: DT_FLOAT
  9137. }
  9138. }
  9139. attr {
  9140. key: "Tshape"
  9141. value {
  9142. type: DT_INT32
  9143. }
  9144. }
  9145. attr {
  9146. key: "_output_shapes"
  9147. value {
  9148. list {
  9149. shape {
  9150. dim {
  9151. size: 10
  9152. }
  9153. }
  9154. }
  9155. }
  9156. }
  9157. }
  9158. node {
  9159. name: "gradients/layer2_1/add_grad/tuple/group_deps"
  9160. op: "NoOp"
  9161. input: "^gradients/layer2_1/add_grad/Reshape"
  9162. input: "^gradients/layer2_1/add_grad/Reshape_1"
  9163. }
  9164. node {
  9165. name: "gradients/layer2_1/add_grad/tuple/control_dependency"
  9166. op: "Identity"
  9167. input: "gradients/layer2_1/add_grad/Reshape"
  9168. input: "^gradients/layer2_1/add_grad/tuple/group_deps"
  9169. attr {
  9170. key: "T"
  9171. value {
  9172. type: DT_FLOAT
  9173. }
  9174. }
  9175. attr {
  9176. key: "_class"
  9177. value {
  9178. list {
  9179. s: "loc:@gradients/layer2_1/add_grad/Reshape"
  9180. }
  9181. }
  9182. }
  9183. attr {
  9184. key: "_output_shapes"
  9185. value {
  9186. list {
  9187. shape {
  9188. dim {
  9189. size: -1
  9190. }
  9191. dim {
  9192. size: 10
  9193. }
  9194. }
  9195. }
  9196. }
  9197. }
  9198. }
  9199. node {
  9200. name: "gradients/layer2_1/add_grad/tuple/control_dependency_1"
  9201. op: "Identity"
  9202. input: "gradients/layer2_1/add_grad/Reshape_1"
  9203. input: "^gradients/layer2_1/add_grad/tuple/group_deps"
  9204. attr {
  9205. key: "T"
  9206. value {
  9207. type: DT_FLOAT
  9208. }
  9209. }
  9210. attr {
  9211. key: "_class"
  9212. value {
  9213. list {
  9214. s: "loc:@gradients/layer2_1/add_grad/Reshape_1"
  9215. }
  9216. }
  9217. }
  9218. attr {
  9219. key: "_output_shapes"
  9220. value {
  9221. list {
  9222. shape {
  9223. dim {
  9224. size: 10
  9225. }
  9226. }
  9227. }
  9228. }
  9229. }
  9230. }
  9231. node {
  9232. name: "gradients/l2_regularizer/L2Loss_grad/mul"
  9233. op: "Mul"
  9234. input: "layer1/weights/read"
  9235. input: "gradients/l2_regularizer_grad/tuple/control_dependency_1"
  9236. attr {
  9237. key: "T"
  9238. value {
  9239. type: DT_FLOAT
  9240. }
  9241. }
  9242. attr {
  9243. key: "_output_shapes"
  9244. value {
  9245. list {
  9246. shape {
  9247. dim {
  9248. size: 784
  9249. }
  9250. dim {
  9251. size: 500
  9252. }
  9253. }
  9254. }
  9255. }
  9256. }
  9257. }
  9258. node {
  9259. name: "gradients/l2_regularizer_1/L2Loss_grad/mul"
  9260. op: "Mul"
  9261. input: "layer2/weights/read"
  9262. input: "gradients/l2_regularizer_1_grad/tuple/control_dependency_1"
  9263. attr {
  9264. key: "T"
  9265. value {
  9266. type: DT_FLOAT
  9267. }
  9268. }
  9269. attr {
  9270. key: "_output_shapes"
  9271. value {
  9272. list {
  9273. shape {
  9274. dim {
  9275. size: 500
  9276. }
  9277. dim {
  9278. size: 10
  9279. }
  9280. }
  9281. }
  9282. }
  9283. }
  9284. }
  9285. node {
  9286. name: "gradients/layer2_1/MatMul_grad/MatMul"
  9287. op: "MatMul"
  9288. input: "gradients/layer2_1/add_grad/tuple/control_dependency"
  9289. input: "layer2/weights/read"
  9290. attr {
  9291. key: "T"
  9292. value {
  9293. type: DT_FLOAT
  9294. }
  9295. }
  9296. attr {
  9297. key: "_output_shapes"
  9298. value {
  9299. list {
  9300. shape {
  9301. dim {
  9302. size: -1
  9303. }
  9304. dim {
  9305. size: 500
  9306. }
  9307. }
  9308. }
  9309. }
  9310. }
  9311. attr {
  9312. key: "transpose_a"
  9313. value {
  9314. b: false
  9315. }
  9316. }
  9317. attr {
  9318. key: "transpose_b"
  9319. value {
  9320. b: true
  9321. }
  9322. }
  9323. }
  9324. node {
  9325. name: "gradients/layer2_1/MatMul_grad/MatMul_1"
  9326. op: "MatMul"
  9327. input: "layer1_1/Relu"
  9328. input: "gradients/layer2_1/add_grad/tuple/control_dependency"
  9329. attr {
  9330. key: "T"
  9331. value {
  9332. type: DT_FLOAT
  9333. }
  9334. }
  9335. attr {
  9336. key: "_output_shapes"
  9337. value {
  9338. list {
  9339. shape {
  9340. dim {
  9341. size: 500
  9342. }
  9343. dim {
  9344. size: 10
  9345. }
  9346. }
  9347. }
  9348. }
  9349. }
  9350. attr {
  9351. key: "transpose_a"
  9352. value {
  9353. b: true
  9354. }
  9355. }
  9356. attr {
  9357. key: "transpose_b"
  9358. value {
  9359. b: false
  9360. }
  9361. }
  9362. }
  9363. node {
  9364. name: "gradients/layer2_1/MatMul_grad/tuple/group_deps"
  9365. op: "NoOp"
  9366. input: "^gradients/layer2_1/MatMul_grad/MatMul"
  9367. input: "^gradients/layer2_1/MatMul_grad/MatMul_1"
  9368. }
  9369. node {
  9370. name: "gradients/layer2_1/MatMul_grad/tuple/control_dependency"
  9371. op: "Identity"
  9372. input: "gradients/layer2_1/MatMul_grad/MatMul"
  9373. input: "^gradients/layer2_1/MatMul_grad/tuple/group_deps"
  9374. attr {
  9375. key: "T"
  9376. value {
  9377. type: DT_FLOAT
  9378. }
  9379. }
  9380. attr {
  9381. key: "_class"
  9382. value {
  9383. list {
  9384. s: "loc:@gradients/layer2_1/MatMul_grad/MatMul"
  9385. }
  9386. }
  9387. }
  9388. attr {
  9389. key: "_output_shapes"
  9390. value {
  9391. list {
  9392. shape {
  9393. dim {
  9394. size: -1
  9395. }
  9396. dim {
  9397. size: 500
  9398. }
  9399. }
  9400. }
  9401. }
  9402. }
  9403. }
  9404. node {
  9405. name: "gradients/layer2_1/MatMul_grad/tuple/control_dependency_1"
  9406. op: "Identity"
  9407. input: "gradients/layer2_1/MatMul_grad/MatMul_1"
  9408. input: "^gradients/layer2_1/MatMul_grad/tuple/group_deps"
  9409. attr {
  9410. key: "T"
  9411. value {
  9412. type: DT_FLOAT
  9413. }
  9414. }
  9415. attr {
  9416. key: "_class"
  9417. value {
  9418. list {
  9419. s: "loc:@gradients/layer2_1/MatMul_grad/MatMul_1"
  9420. }
  9421. }
  9422. }
  9423. attr {
  9424. key: "_output_shapes"
  9425. value {
  9426. list {
  9427. shape {
  9428. dim {
  9429. size: 500
  9430. }
  9431. dim {
  9432. size: 10
  9433. }
  9434. }
  9435. }
  9436. }
  9437. }
  9438. }
  9439. node {
  9440. name: "gradients/layer1_1/Relu_grad/ReluGrad"
  9441. op: "ReluGrad"
  9442. input: "gradients/layer2_1/MatMul_grad/tuple/control_dependency"
  9443. input: "layer1_1/Relu"
  9444. attr {
  9445. key: "T"
  9446. value {
  9447. type: DT_FLOAT
  9448. }
  9449. }
  9450. attr {
  9451. key: "_output_shapes"
  9452. value {
  9453. list {
  9454. shape {
  9455. dim {
  9456. size: -1
  9457. }
  9458. dim {
  9459. size: 500
  9460. }
  9461. }
  9462. }
  9463. }
  9464. }
  9465. }
  9466. node {
  9467. name: "gradients/layer1_1/add_grad/Shape"
  9468. op: "Shape"
  9469. input: "layer1_1/MatMul"
  9470. attr {
  9471. key: "T"
  9472. value {
  9473. type: DT_FLOAT
  9474. }
  9475. }
  9476. attr {
  9477. key: "_output_shapes"
  9478. value {
  9479. list {
  9480. shape {
  9481. dim {
  9482. size: 2
  9483. }
  9484. }
  9485. }
  9486. }
  9487. }
  9488. attr {
  9489. key: "out_type"
  9490. value {
  9491. type: DT_INT32
  9492. }
  9493. }
  9494. }
  9495. node {
  9496. name: "gradients/layer1_1/add_grad/Shape_1"
  9497. op: "Const"
  9498. attr {
  9499. key: "_output_shapes"
  9500. value {
  9501. list {
  9502. shape {
  9503. dim {
  9504. size: 1
  9505. }
  9506. }
  9507. }
  9508. }
  9509. }
  9510. attr {
  9511. key: "dtype"
  9512. value {
  9513. type: DT_INT32
  9514. }
  9515. }
  9516. attr {
  9517. key: "value"
  9518. value {
  9519. tensor {
  9520. dtype: DT_INT32
  9521. tensor_shape {
  9522. dim {
  9523. size: 1
  9524. }
  9525. }
  9526. int_val: 500
  9527. }
  9528. }
  9529. }
  9530. }
  9531. node {
  9532. name: "gradients/layer1_1/add_grad/BroadcastGradientArgs"
  9533. op: "BroadcastGradientArgs"
  9534. input: "gradients/layer1_1/add_grad/Shape"
  9535. input: "gradients/layer1_1/add_grad/Shape_1"
  9536. attr {
  9537. key: "T"
  9538. value {
  9539. type: DT_INT32
  9540. }
  9541. }
  9542. attr {
  9543. key: "_output_shapes"
  9544. value {
  9545. list {
  9546. shape {
  9547. dim {
  9548. size: -1
  9549. }
  9550. }
  9551. shape {
  9552. dim {
  9553. size: -1
  9554. }
  9555. }
  9556. }
  9557. }
  9558. }
  9559. }
  9560. node {
  9561. name: "gradients/layer1_1/add_grad/Sum"
  9562. op: "Sum"
  9563. input: "gradients/layer1_1/Relu_grad/ReluGrad"
  9564. input: "gradients/layer1_1/add_grad/BroadcastGradientArgs"
  9565. attr {
  9566. key: "T"
  9567. value {
  9568. type: DT_FLOAT
  9569. }
  9570. }
  9571. attr {
  9572. key: "Tidx"
  9573. value {
  9574. type: DT_INT32
  9575. }
  9576. }
  9577. attr {
  9578. key: "_output_shapes"
  9579. value {
  9580. list {
  9581. shape {
  9582. unknown_rank: true
  9583. }
  9584. }
  9585. }
  9586. }
  9587. attr {
  9588. key: "keep_dims"
  9589. value {
  9590. b: false
  9591. }
  9592. }
  9593. }
  9594. node {
  9595. name: "gradients/layer1_1/add_grad/Reshape"
  9596. op: "Reshape"
  9597. input: "gradients/layer1_1/add_grad/Sum"
  9598. input: "gradients/layer1_1/add_grad/Shape"
  9599. attr {
  9600. key: "T"
  9601. value {
  9602. type: DT_FLOAT
  9603. }
  9604. }
  9605. attr {
  9606. key: "Tshape"
  9607. value {
  9608. type: DT_INT32
  9609. }
  9610. }
  9611. attr {
  9612. key: "_output_shapes"
  9613. value {
  9614. list {
  9615. shape {
  9616. dim {
  9617. size: -1
  9618. }
  9619. dim {
  9620. size: 500
  9621. }
  9622. }
  9623. }
  9624. }
  9625. }
  9626. }
  9627. node {
  9628. name: "gradients/layer1_1/add_grad/Sum_1"
  9629. op: "Sum"
  9630. input: "gradients/layer1_1/Relu_grad/ReluGrad"
  9631. input: "gradients/layer1_1/add_grad/BroadcastGradientArgs:1"
  9632. attr {
  9633. key: "T"
  9634. value {
  9635. type: DT_FLOAT
  9636. }
  9637. }
  9638. attr {
  9639. key: "Tidx"
  9640. value {
  9641. type: DT_INT32
  9642. }
  9643. }
  9644. attr {
  9645. key: "_output_shapes"
  9646. value {
  9647. list {
  9648. shape {
  9649. unknown_rank: true
  9650. }
  9651. }
  9652. }
  9653. }
  9654. attr {
  9655. key: "keep_dims"
  9656. value {
  9657. b: false
  9658. }
  9659. }
  9660. }
  9661. node {
  9662. name: "gradients/layer1_1/add_grad/Reshape_1"
  9663. op: "Reshape"
  9664. input: "gradients/layer1_1/add_grad/Sum_1"
  9665. input: "gradients/layer1_1/add_grad/Shape_1"
  9666. attr {
  9667. key: "T"
  9668. value {
  9669. type: DT_FLOAT
  9670. }
  9671. }
  9672. attr {
  9673. key: "Tshape"
  9674. value {
  9675. type: DT_INT32
  9676. }
  9677. }
  9678. attr {
  9679. key: "_output_shapes"
  9680. value {
  9681. list {
  9682. shape {
  9683. dim {
  9684. size: 500
  9685. }
  9686. }
  9687. }
  9688. }
  9689. }
  9690. }
  9691. node {
  9692. name: "gradients/layer1_1/add_grad/tuple/group_deps"
  9693. op: "NoOp"
  9694. input: "^gradients/layer1_1/add_grad/Reshape"
  9695. input: "^gradients/layer1_1/add_grad/Reshape_1"
  9696. }
  9697. node {
  9698. name: "gradients/layer1_1/add_grad/tuple/control_dependency"
  9699. op: "Identity"
  9700. input: "gradients/layer1_1/add_grad/Reshape"
  9701. input: "^gradients/layer1_1/add_grad/tuple/group_deps"
  9702. attr {
  9703. key: "T"
  9704. value {
  9705. type: DT_FLOAT
  9706. }
  9707. }
  9708. attr {
  9709. key: "_class"
  9710. value {
  9711. list {
  9712. s: "loc:@gradients/layer1_1/add_grad/Reshape"
  9713. }
  9714. }
  9715. }
  9716. attr {
  9717. key: "_output_shapes"
  9718. value {
  9719. list {
  9720. shape {
  9721. dim {
  9722. size: -1
  9723. }
  9724. dim {
  9725. size: 500
  9726. }
  9727. }
  9728. }
  9729. }
  9730. }
  9731. }
  9732. node {
  9733. name: "gradients/layer1_1/add_grad/tuple/control_dependency_1"
  9734. op: "Identity"
  9735. input: "gradients/layer1_1/add_grad/Reshape_1"
  9736. input: "^gradients/layer1_1/add_grad/tuple/group_deps"
  9737. attr {
  9738. key: "T"
  9739. value {
  9740. type: DT_FLOAT
  9741. }
  9742. }
  9743. attr {
  9744. key: "_class"
  9745. value {
  9746. list {
  9747. s: "loc:@gradients/layer1_1/add_grad/Reshape_1"
  9748. }
  9749. }
  9750. }
  9751. attr {
  9752. key: "_output_shapes"
  9753. value {
  9754. list {
  9755. shape {
  9756. dim {
  9757. size: 500
  9758. }
  9759. }
  9760. }
  9761. }
  9762. }
  9763. }
  9764. node {
  9765. name: "gradients/AddN"
  9766. op: "AddN"
  9767. input: "gradients/l2_regularizer_1/L2Loss_grad/mul"
  9768. input: "gradients/layer2_1/MatMul_grad/tuple/control_dependency_1"
  9769. attr {
  9770. key: "N"
  9771. value {
  9772. i: 2
  9773. }
  9774. }
  9775. attr {
  9776. key: "T"
  9777. value {
  9778. type: DT_FLOAT
  9779. }
  9780. }
  9781. attr {
  9782. key: "_class"
  9783. value {
  9784. list {
  9785. s: "loc:@gradients/l2_regularizer_1/L2Loss_grad/mul"
  9786. }
  9787. }
  9788. }
  9789. attr {
  9790. key: "_output_shapes"
  9791. value {
  9792. list {
  9793. shape {
  9794. dim {
  9795. size: 500
  9796. }
  9797. dim {
  9798. size: 10
  9799. }
  9800. }
  9801. }
  9802. }
  9803. }
  9804. }
  9805. node {
  9806. name: "gradients/layer1_1/MatMul_grad/MatMul"
  9807. op: "MatMul"
  9808. input: "gradients/layer1_1/add_grad/tuple/control_dependency"
  9809. input: "layer1/weights/read"
  9810. attr {
  9811. key: "T"
  9812. value {
  9813. type: DT_FLOAT
  9814. }
  9815. }
  9816. attr {
  9817. key: "_output_shapes"
  9818. value {
  9819. list {
  9820. shape {
  9821. dim {
  9822. size: -1
  9823. }
  9824. dim {
  9825. size: 784
  9826. }
  9827. }
  9828. }
  9829. }
  9830. }
  9831. attr {
  9832. key: "transpose_a"
  9833. value {
  9834. b: false
  9835. }
  9836. }
  9837. attr {
  9838. key: "transpose_b"
  9839. value {
  9840. b: true
  9841. }
  9842. }
  9843. }
  9844. node {
  9845. name: "gradients/layer1_1/MatMul_grad/MatMul_1"
  9846. op: "MatMul"
  9847. input: "x-input"
  9848. input: "gradients/layer1_1/add_grad/tuple/control_dependency"
  9849. attr {
  9850. key: "T"
  9851. value {
  9852. type: DT_FLOAT
  9853. }
  9854. }
  9855. attr {
  9856. key: "_output_shapes"
  9857. value {
  9858. list {
  9859. shape {
  9860. dim {
  9861. size: 784
  9862. }
  9863. dim {
  9864. size: 500
  9865. }
  9866. }
  9867. }
  9868. }
  9869. }
  9870. attr {
  9871. key: "transpose_a"
  9872. value {
  9873. b: true
  9874. }
  9875. }
  9876. attr {
  9877. key: "transpose_b"
  9878. value {
  9879. b: false
  9880. }
  9881. }
  9882. }
  9883. node {
  9884. name: "gradients/layer1_1/MatMul_grad/tuple/group_deps"
  9885. op: "NoOp"
  9886. input: "^gradients/layer1_1/MatMul_grad/MatMul"
  9887. input: "^gradients/layer1_1/MatMul_grad/MatMul_1"
  9888. }
  9889. node {
  9890. name: "gradients/layer1_1/MatMul_grad/tuple/control_dependency"
  9891. op: "Identity"
  9892. input: "gradients/layer1_1/MatMul_grad/MatMul"
  9893. input: "^gradients/layer1_1/MatMul_grad/tuple/group_deps"
  9894. attr {
  9895. key: "T"
  9896. value {
  9897. type: DT_FLOAT
  9898. }
  9899. }
  9900. attr {
  9901. key: "_class"
  9902. value {
  9903. list {
  9904. s: "loc:@gradients/layer1_1/MatMul_grad/MatMul"
  9905. }
  9906. }
  9907. }
  9908. attr {
  9909. key: "_output_shapes"
  9910. value {
  9911. list {
  9912. shape {
  9913. dim {
  9914. size: -1
  9915. }
  9916. dim {
  9917. size: 784
  9918. }
  9919. }
  9920. }
  9921. }
  9922. }
  9923. }
  9924. node {
  9925. name: "gradients/layer1_1/MatMul_grad/tuple/control_dependency_1"
  9926. op: "Identity"
  9927. input: "gradients/layer1_1/MatMul_grad/MatMul_1"
  9928. input: "^gradients/layer1_1/MatMul_grad/tuple/group_deps"
  9929. attr {
  9930. key: "T"
  9931. value {
  9932. type: DT_FLOAT
  9933. }
  9934. }
  9935. attr {
  9936. key: "_class"
  9937. value {
  9938. list {
  9939. s: "loc:@gradients/layer1_1/MatMul_grad/MatMul_1"
  9940. }
  9941. }
  9942. }
  9943. attr {
  9944. key: "_output_shapes"
  9945. value {
  9946. list {
  9947. shape {
  9948. dim {
  9949. size: 784
  9950. }
  9951. dim {
  9952. size: 500
  9953. }
  9954. }
  9955. }
  9956. }
  9957. }
  9958. }
  9959. node {
  9960. name: "gradients/AddN_1"
  9961. op: "AddN"
  9962. input: "gradients/l2_regularizer/L2Loss_grad/mul"
  9963. input: "gradients/layer1_1/MatMul_grad/tuple/control_dependency_1"
  9964. attr {
  9965. key: "N"
  9966. value {
  9967. i: 2
  9968. }
  9969. }
  9970. attr {
  9971. key: "T"
  9972. value {
  9973. type: DT_FLOAT
  9974. }
  9975. }
  9976. attr {
  9977. key: "_class"
  9978. value {
  9979. list {
  9980. s: "loc:@gradients/l2_regularizer/L2Loss_grad/mul"
  9981. }
  9982. }
  9983. }
  9984. attr {
  9985. key: "_output_shapes"
  9986. value {
  9987. list {
  9988. shape {
  9989. dim {
  9990. size: 784
  9991. }
  9992. dim {
  9993. size: 500
  9994. }
  9995. }
  9996. }
  9997. }
  9998. }
  9999. }
  10000. node {
  10001. name: "GradientDescent/update_layer1/weights/ApplyGradientDescent"
  10002. op: "ApplyGradientDescent"
  10003. input: "layer1/weights"
  10004. input: "ExponentialDecay"
  10005. input: "gradients/AddN_1"
  10006. attr {
  10007. key: "T"
  10008. value {
  10009. type: DT_FLOAT
  10010. }
  10011. }
  10012. attr {
  10013. key: "_class"
  10014. value {
  10015. list {
  10016. s: "loc:@layer1/weights"
  10017. }
  10018. }
  10019. }
  10020. attr {
  10021. key: "_output_shapes"
  10022. value {
  10023. list {
  10024. shape {
  10025. dim {
  10026. size: 784
  10027. }
  10028. dim {
  10029. size: 500
  10030. }
  10031. }
  10032. }
  10033. }
  10034. }
  10035. attr {
  10036. key: "use_locking"
  10037. value {
  10038. b: false
  10039. }
  10040. }
  10041. }
  10042. node {
  10043. name: "GradientDescent/update_layer1/biases/ApplyGradientDescent"
  10044. op: "ApplyGradientDescent"
  10045. input: "layer1/biases"
  10046. input: "ExponentialDecay"
  10047. input: "gradients/layer1_1/add_grad/tuple/control_dependency_1"
  10048. attr {
  10049. key: "T"
  10050. value {
  10051. type: DT_FLOAT
  10052. }
  10053. }
  10054. attr {
  10055. key: "_class"
  10056. value {
  10057. list {
  10058. s: "loc:@layer1/biases"
  10059. }
  10060. }
  10061. }
  10062. attr {
  10063. key: "_output_shapes"
  10064. value {
  10065. list {
  10066. shape {
  10067. dim {
  10068. size: 500
  10069. }
  10070. }
  10071. }
  10072. }
  10073. }
  10074. attr {
  10075. key: "use_locking"
  10076. value {
  10077. b: false
  10078. }
  10079. }
  10080. }
  10081. node {
  10082. name: "GradientDescent/update_layer2/weights/ApplyGradientDescent"
  10083. op: "ApplyGradientDescent"
  10084. input: "layer2/weights"
  10085. input: "ExponentialDecay"
  10086. input: "gradients/AddN"
  10087. attr {
  10088. key: "T"
  10089. value {
  10090. type: DT_FLOAT
  10091. }
  10092. }
  10093. attr {
  10094. key: "_class"
  10095. value {
  10096. list {
  10097. s: "loc:@layer2/weights"
  10098. }
  10099. }
  10100. }
  10101. attr {
  10102. key: "_output_shapes"
  10103. value {
  10104. list {
  10105. shape {
  10106. dim {
  10107. size: 500
  10108. }
  10109. dim {
  10110. size: 10
  10111. }
  10112. }
  10113. }
  10114. }
  10115. }
  10116. attr {
  10117. key: "use_locking"
  10118. value {
  10119. b: false
  10120. }
  10121. }
  10122. }
  10123. node {
  10124. name: "GradientDescent/update_layer2/biases/ApplyGradientDescent"
  10125. op: "ApplyGradientDescent"
  10126. input: "layer2/biases"
  10127. input: "ExponentialDecay"
  10128. input: "gradients/layer2_1/add_grad/tuple/control_dependency_1"
  10129. attr {
  10130. key: "T"
  10131. value {
  10132. type: DT_FLOAT
  10133. }
  10134. }
  10135. attr {
  10136. key: "_class"
  10137. value {
  10138. list {
  10139. s: "loc:@layer2/biases"
  10140. }
  10141. }
  10142. }
  10143. attr {
  10144. key: "_output_shapes"
  10145. value {
  10146. list {
  10147. shape {
  10148. dim {
  10149. size: 10
  10150. }
  10151. }
  10152. }
  10153. }
  10154. }
  10155. attr {
  10156. key: "use_locking"
  10157. value {
  10158. b: false
  10159. }
  10160. }
  10161. }
  10162. node {
  10163. name: "GradientDescent/update"
  10164. op: "NoOp"
  10165. input: "^GradientDescent/update_layer1/biases/ApplyGradientDescent"
  10166. input: "^GradientDescent/update_layer1/weights/ApplyGradientDescent"
  10167. input: "^GradientDescent/update_layer2/biases/ApplyGradientDescent"
  10168. input: "^GradientDescent/update_layer2/weights/ApplyGradientDescent"
  10169. }
  10170. node {
  10171. name: "GradientDescent/value"
  10172. op: "Const"
  10173. input: "^GradientDescent/update"
  10174. attr {
  10175. key: "_class"
  10176. value {
  10177. list {
  10178. s: "loc:@Variable"
  10179. }
  10180. }
  10181. }
  10182. attr {
  10183. key: "_output_shapes"
  10184. value {
  10185. list {
  10186. shape {
  10187. }
  10188. }
  10189. }
  10190. }
  10191. attr {
  10192. key: "dtype"
  10193. value {
  10194. type: DT_INT32
  10195. }
  10196. }
  10197. attr {
  10198. key: "value"
  10199. value {
  10200. tensor {
  10201. dtype: DT_INT32
  10202. tensor_shape {
  10203. }
  10204. int_val: 1
  10205. }
  10206. }
  10207. }
  10208. }
  10209. node {
  10210. name: "GradientDescent"
  10211. op: "AssignAdd"
  10212. input: "Variable"
  10213. input: "GradientDescent/value"
  10214. attr {
  10215. key: "T"
  10216. value {
  10217. type: DT_INT32
  10218. }
  10219. }
  10220. attr {
  10221. key: "_class"
  10222. value {
  10223. list {
  10224. s: "loc:@Variable"
  10225. }
  10226. }
  10227. }
  10228. attr {
  10229. key: "_output_shapes"
  10230. value {
  10231. list {
  10232. shape {
  10233. }
  10234. }
  10235. }
  10236. }
  10237. attr {
  10238. key: "use_locking"
  10239. value {
  10240. b: false
  10241. }
  10242. }
  10243. }
  10244. node {
  10245. name: "train"
  10246. op: "NoOp"
  10247. input: "^ExponentialMovingAverage"
  10248. input: "^GradientDescent"
  10249. }
  10250. node {
  10251. name: "ArgMax_1/dimension"
  10252. op: "Const"
  10253. attr {
  10254. key: "_output_shapes"
  10255. value {
  10256. list {
  10257. shape {
  10258. }
  10259. }
  10260. }
  10261. }
  10262. attr {
  10263. key: "dtype"
  10264. value {
  10265. type: DT_INT32
  10266. }
  10267. }
  10268. attr {
  10269. key: "value"
  10270. value {
  10271. tensor {
  10272. dtype: DT_INT32
  10273. tensor_shape {
  10274. }
  10275. int_val: 1
  10276. }
  10277. }
  10278. }
  10279. }
  10280. node {
  10281. name: "ArgMax_1"
  10282. op: "ArgMax"
  10283. input: "layer2_1/add"
  10284. input: "ArgMax_1/dimension"
  10285. attr {
  10286. key: "T"
  10287. value {
  10288. type: DT_FLOAT
  10289. }
  10290. }
  10291. attr {
  10292. key: "Tidx"
  10293. value {
  10294. type: DT_INT32
  10295. }
  10296. }
  10297. attr {
  10298. key: "_output_shapes"
  10299. value {
  10300. list {
  10301. shape {
  10302. dim {
  10303. size: -1
  10304. }
  10305. }
  10306. }
  10307. }
  10308. }
  10309. attr {
  10310. key: "output_type"
  10311. value {
  10312. type: DT_INT64
  10313. }
  10314. }
  10315. }
  10316. node {
  10317. name: "ArgMax_2/dimension"
  10318. op: "Const"
  10319. attr {
  10320. key: "_output_shapes"
  10321. value {
  10322. list {
  10323. shape {
  10324. }
  10325. }
  10326. }
  10327. }
  10328. attr {
  10329. key: "dtype"
  10330. value {
  10331. type: DT_INT32
  10332. }
  10333. }
  10334. attr {
  10335. key: "value"
  10336. value {
  10337. tensor {
  10338. dtype: DT_INT32
  10339. tensor_shape {
  10340. }
  10341. int_val: 1
  10342. }
  10343. }
  10344. }
  10345. }
  10346. node {
  10347. name: "ArgMax_2"
  10348. op: "ArgMax"
  10349. input: "layer2/add"
  10350. input: "ArgMax_2/dimension"
  10351. attr {
  10352. key: "T"
  10353. value {
  10354. type: DT_FLOAT
  10355. }
  10356. }
  10357. attr {
  10358. key: "Tidx"
  10359. value {
  10360. type: DT_INT32
  10361. }
  10362. }
  10363. attr {
  10364. key: "_output_shapes"
  10365. value {
  10366. list {
  10367. shape {
  10368. dim {
  10369. size: -1
  10370. }
  10371. }
  10372. }
  10373. }
  10374. }
  10375. attr {
  10376. key: "output_type"
  10377. value {
  10378. type: DT_INT64
  10379. }
  10380. }
  10381. }
  10382. node {
  10383. name: "Equal"
  10384. op: "Equal"
  10385. input: "ArgMax_1"
  10386. input: "ArgMax_2"
  10387. attr {
  10388. key: "T"
  10389. value {
  10390. type: DT_INT64
  10391. }
  10392. }
  10393. attr {
  10394. key: "_output_shapes"
  10395. value {
  10396. list {
  10397. shape {
  10398. dim {
  10399. size: -1
  10400. }
  10401. }
  10402. }
  10403. }
  10404. }
  10405. }
  10406. node {
  10407. name: "Cast"
  10408. op: "Cast"
  10409. input: "Equal"
  10410. attr {
  10411. key: "DstT"
  10412. value {
  10413. type: DT_FLOAT
  10414. }
  10415. }
  10416. attr {
  10417. key: "SrcT"
  10418. value {
  10419. type: DT_BOOL
  10420. }
  10421. }
  10422. attr {
  10423. key: "_output_shapes"
  10424. value {
  10425. list {
  10426. shape {
  10427. dim {
  10428. size: -1
  10429. }
  10430. }
  10431. }
  10432. }
  10433. }
  10434. }
  10435. node {
  10436. name: "Const_1"
  10437. op: "Const"
  10438. attr {
  10439. key: "_output_shapes"
  10440. value {
  10441. list {
  10442. shape {
  10443. dim {
  10444. size: 1
  10445. }
  10446. }
  10447. }
  10448. }
  10449. }
  10450. attr {
  10451. key: "dtype"
  10452. value {
  10453. type: DT_INT32
  10454. }
  10455. }
  10456. attr {
  10457. key: "value"
  10458. value {
  10459. tensor {
  10460. dtype: DT_INT32
  10461. tensor_shape {
  10462. dim {
  10463. size: 1
  10464. }
  10465. }
  10466. int_val: 0
  10467. }
  10468. }
  10469. }
  10470. }
  10471. node {
  10472. name: "Mean_1"
  10473. op: "Mean"
  10474. input: "Cast"
  10475. input: "Const_1"
  10476. attr {
  10477. key: "T"
  10478. value {
  10479. type: DT_FLOAT
  10480. }
  10481. }
  10482. attr {
  10483. key: "Tidx"
  10484. value {
  10485. type: DT_INT32
  10486. }
  10487. }
  10488. attr {
  10489. key: "_output_shapes"
  10490. value {
  10491. list {
  10492. shape {
  10493. }
  10494. }
  10495. }
  10496. }
  10497. attr {
  10498. key: "keep_dims"
  10499. value {
  10500. b: false
  10501. }
  10502. }
  10503. }
  10504. node {
  10505. name: "init"
  10506. op: "NoOp"
  10507. input: "^Variable/Assign"
  10508. input: "^layer1/biases/Assign"
  10509. input: "^layer1/biases/ExponentialMovingAverage/Assign"
  10510. input: "^layer1/weights/Assign"
  10511. input: "^layer1/weights/ExponentialMovingAverage/Assign"
  10512. input: "^layer2/biases/Assign"
  10513. input: "^layer2/biases/ExponentialMovingAverage/Assign"
  10514. input: "^layer2/weights/Assign"
  10515. input: "^layer2/weights/ExponentialMovingAverage/Assign"
  10516. }
  10517. node {
  10518. name: "save/Const"
  10519. op: "Const"
  10520. attr {
  10521. key: "_output_shapes"
  10522. value {
  10523. list {
  10524. shape {
  10525. }
  10526. }
  10527. }
  10528. }
  10529. attr {
  10530. key: "dtype"
  10531. value {
  10532. type: DT_STRING
  10533. }
  10534. }
  10535. attr {
  10536. key: "value"
  10537. value {
  10538. tensor {
  10539. dtype: DT_STRING
  10540. tensor_shape {
  10541. }
  10542. string_val: "model"
  10543. }
  10544. }
  10545. }
  10546. }
  10547. node {
  10548. name: "save/SaveV2/tensor_names"
  10549. op: "Const"
  10550. attr {
  10551. key: "_output_shapes"
  10552. value {
  10553. list {
  10554. shape {
  10555. dim {
  10556. size: 9
  10557. }
  10558. }
  10559. }
  10560. }
  10561. }
  10562. attr {
  10563. key: "dtype"
  10564. value {
  10565. type: DT_STRING
  10566. }
  10567. }
  10568. attr {
  10569. key: "value"
  10570. value {
  10571. tensor {
  10572. dtype: DT_STRING
  10573. tensor_shape {
  10574. dim {
  10575. size: 9
  10576. }
  10577. }
  10578. string_val: "Variable"
  10579. string_val: "layer1/biases"
  10580. string_val: "layer1/biases/ExponentialMovingAverage"
  10581. string_val: "layer1/weights"
  10582. string_val: "layer1/weights/ExponentialMovingAverage"
  10583. string_val: "layer2/biases"
  10584. string_val: "layer2/biases/ExponentialMovingAverage"
  10585. string_val: "layer2/weights"
  10586. string_val: "layer2/weights/ExponentialMovingAverage"
  10587. }
  10588. }
  10589. }
  10590. }
  10591. node {
  10592. name: "save/SaveV2/shape_and_slices"
  10593. op: "Const"
  10594. attr {
  10595. key: "_output_shapes"
  10596. value {
  10597. list {
  10598. shape {
  10599. dim {
  10600. size: 9
  10601. }
  10602. }
  10603. }
  10604. }
  10605. }
  10606. attr {
  10607. key: "dtype"
  10608. value {
  10609. type: DT_STRING
  10610. }
  10611. }
  10612. attr {
  10613. key: "value"
  10614. value {
  10615. tensor {
  10616. dtype: DT_STRING
  10617. tensor_shape {
  10618. dim {
  10619. size: 9
  10620. }
  10621. }
  10622. string_val: ""
  10623. string_val: ""
  10624. string_val: ""
  10625. string_val: ""
  10626. string_val: ""
  10627. string_val: ""
  10628. string_val: ""
  10629. string_val: ""
  10630. string_val: ""
  10631. }
  10632. }
  10633. }
  10634. }
  10635. node {
  10636. name: "save/SaveV2"
  10637. op: "SaveV2"
  10638. input: "save/Const"
  10639. input: "save/SaveV2/tensor_names"
  10640. input: "save/SaveV2/shape_and_slices"
  10641. input: "Variable"
  10642. input: "layer1/biases"
  10643. input: "layer1/biases/ExponentialMovingAverage"
  10644. input: "layer1/weights"
  10645. input: "layer1/weights/ExponentialMovingAverage"
  10646. input: "layer2/biases"
  10647. input: "layer2/biases/ExponentialMovingAverage"
  10648. input: "layer2/weights"
  10649. input: "layer2/weights/ExponentialMovingAverage"
  10650. attr {
  10651. key: "dtypes"
  10652. value {
  10653. list {
  10654. type: DT_INT32
  10655. type: DT_FLOAT
  10656. type: DT_FLOAT
  10657. type: DT_FLOAT
  10658. type: DT_FLOAT
  10659. type: DT_FLOAT
  10660. type: DT_FLOAT
  10661. type: DT_FLOAT
  10662. type: DT_FLOAT
  10663. }
  10664. }
  10665. }
  10666. }
  10667. node {
  10668. name: "save/control_dependency"
  10669. op: "Identity"
  10670. input: "save/Const"
  10671. input: "^save/SaveV2"
  10672. attr {
  10673. key: "T"
  10674. value {
  10675. type: DT_STRING
  10676. }
  10677. }
  10678. attr {
  10679. key: "_class"
  10680. value {
  10681. list {
  10682. s: "loc:@save/Const"
  10683. }
  10684. }
  10685. }
  10686. attr {
  10687. key: "_output_shapes"
  10688. value {
  10689. list {
  10690. shape {
  10691. }
  10692. }
  10693. }
  10694. }
  10695. }
  10696. node {
  10697. name: "save/RestoreV2/tensor_names"
  10698. op: "Const"
  10699. device: "/device:CPU:0"
  10700. attr {
  10701. key: "_output_shapes"
  10702. value {
  10703. list {
  10704. shape {
  10705. dim {
  10706. size: 9
  10707. }
  10708. }
  10709. }
  10710. }
  10711. }
  10712. attr {
  10713. key: "dtype"
  10714. value {
  10715. type: DT_STRING
  10716. }
  10717. }
  10718. attr {
  10719. key: "value"
  10720. value {
  10721. tensor {
  10722. dtype: DT_STRING
  10723. tensor_shape {
  10724. dim {
  10725. size: 9
  10726. }
  10727. }
  10728. string_val: "Variable"
  10729. string_val: "layer1/biases"
  10730. string_val: "layer1/biases/ExponentialMovingAverage"
  10731. string_val: "layer1/weights"
  10732. string_val: "layer1/weights/ExponentialMovingAverage"
  10733. string_val: "layer2/biases"
  10734. string_val: "layer2/biases/ExponentialMovingAverage"
  10735. string_val: "layer2/weights"
  10736. string_val: "layer2/weights/ExponentialMovingAverage"
  10737. }
  10738. }
  10739. }
  10740. }
  10741. node {
  10742. name: "save/RestoreV2/shape_and_slices"
  10743. op: "Const"
  10744. device: "/device:CPU:0"
  10745. attr {
  10746. key: "_output_shapes"
  10747. value {
  10748. list {
  10749. shape {
  10750. dim {
  10751. size: 9
  10752. }
  10753. }
  10754. }
  10755. }
  10756. }
  10757. attr {
  10758. key: "dtype"
  10759. value {
  10760. type: DT_STRING
  10761. }
  10762. }
  10763. attr {
  10764. key: "value"
  10765. value {
  10766. tensor {
  10767. dtype: DT_STRING
  10768. tensor_shape {
  10769. dim {
  10770. size: 9
  10771. }
  10772. }
  10773. string_val: ""
  10774. string_val: ""
  10775. string_val: ""
  10776. string_val: ""
  10777. string_val: ""
  10778. string_val: ""
  10779. string_val: ""
  10780. string_val: ""
  10781. string_val: ""
  10782. }
  10783. }
  10784. }
  10785. }
  10786. node {
  10787. name: "save/RestoreV2"
  10788. op: "RestoreV2"
  10789. input: "save/Const"
  10790. input: "save/RestoreV2/tensor_names"
  10791. input: "save/RestoreV2/shape_and_slices"
  10792. device: "/device:CPU:0"
  10793. attr {
  10794. key: "_output_shapes"
  10795. value {
  10796. list {
  10797. shape {
  10798. unknown_rank: true
  10799. }
  10800. shape {
  10801. unknown_rank: true
  10802. }
  10803. shape {
  10804. unknown_rank: true
  10805. }
  10806. shape {
  10807. unknown_rank: true
  10808. }
  10809. shape {
  10810. unknown_rank: true
  10811. }
  10812. shape {
  10813. unknown_rank: true
  10814. }
  10815. shape {
  10816. unknown_rank: true
  10817. }
  10818. shape {
  10819. unknown_rank: true
  10820. }
  10821. shape {
  10822. unknown_rank: true
  10823. }
  10824. }
  10825. }
  10826. }
  10827. attr {
  10828. key: "dtypes"
  10829. value {
  10830. list {
  10831. type: DT_INT32
  10832. type: DT_FLOAT
  10833. type: DT_FLOAT
  10834. type: DT_FLOAT
  10835. type: DT_FLOAT
  10836. type: DT_FLOAT
  10837. type: DT_FLOAT
  10838. type: DT_FLOAT
  10839. type: DT_FLOAT
  10840. }
  10841. }
  10842. }
  10843. }
  10844. node {
  10845. name: "save/Assign"
  10846. op: "Assign"
  10847. input: "Variable"
  10848. input: "save/RestoreV2"
  10849. attr {
  10850. key: "T"
  10851. value {
  10852. type: DT_INT32
  10853. }
  10854. }
  10855. attr {
  10856. key: "_class"
  10857. value {
  10858. list {
  10859. s: "loc:@Variable"
  10860. }
  10861. }
  10862. }
  10863. attr {
  10864. key: "_output_shapes"
  10865. value {
  10866. list {
  10867. shape {
  10868. }
  10869. }
  10870. }
  10871. }
  10872. attr {
  10873. key: "use_locking"
  10874. value {
  10875. b: true
  10876. }
  10877. }
  10878. attr {
  10879. key: "validate_shape"
  10880. value {
  10881. b: true
  10882. }
  10883. }
  10884. }
  10885. node {
  10886. name: "save/Assign_1"
  10887. op: "Assign"
  10888. input: "layer1/biases"
  10889. input: "save/RestoreV2:1"
  10890. attr {
  10891. key: "T"
  10892. value {
  10893. type: DT_FLOAT
  10894. }
  10895. }
  10896. attr {
  10897. key: "_class"
  10898. value {
  10899. list {
  10900. s: "loc:@layer1/biases"
  10901. }
  10902. }
  10903. }
  10904. attr {
  10905. key: "_output_shapes"
  10906. value {
  10907. list {
  10908. shape {
  10909. dim {
  10910. size: 500
  10911. }
  10912. }
  10913. }
  10914. }
  10915. }
  10916. attr {
  10917. key: "use_locking"
  10918. value {
  10919. b: true
  10920. }
  10921. }
  10922. attr {
  10923. key: "validate_shape"
  10924. value {
  10925. b: true
  10926. }
  10927. }
  10928. }
  10929. node {
  10930. name: "save/Assign_2"
  10931. op: "Assign"
  10932. input: "layer1/biases/ExponentialMovingAverage"
  10933. input: "save/RestoreV2:2"
  10934. attr {
  10935. key: "T"
  10936. value {
  10937. type: DT_FLOAT
  10938. }
  10939. }
  10940. attr {
  10941. key: "_class"
  10942. value {
  10943. list {
  10944. s: "loc:@layer1/biases"
  10945. }
  10946. }
  10947. }
  10948. attr {
  10949. key: "_output_shapes"
  10950. value {
  10951. list {
  10952. shape {
  10953. dim {
  10954. size: 500
  10955. }
  10956. }
  10957. }
  10958. }
  10959. }
  10960. attr {
  10961. key: "use_locking"
  10962. value {
  10963. b: true
  10964. }
  10965. }
  10966. attr {
  10967. key: "validate_shape"
  10968. value {
  10969. b: true
  10970. }
  10971. }
  10972. }
  10973. node {
  10974. name: "save/Assign_3"
  10975. op: "Assign"
  10976. input: "layer1/weights"
  10977. input: "save/RestoreV2:3"
  10978. attr {
  10979. key: "T"
  10980. value {
  10981. type: DT_FLOAT
  10982. }
  10983. }
  10984. attr {
  10985. key: "_class"
  10986. value {
  10987. list {
  10988. s: "loc:@layer1/weights"
  10989. }
  10990. }
  10991. }
  10992. attr {
  10993. key: "_output_shapes"
  10994. value {
  10995. list {
  10996. shape {
  10997. dim {
  10998. size: 784
  10999. }
  11000. dim {
  11001. size: 500
  11002. }
  11003. }
  11004. }
  11005. }
  11006. }
  11007. attr {
  11008. key: "use_locking"
  11009. value {
  11010. b: true
  11011. }
  11012. }
  11013. attr {
  11014. key: "validate_shape"
  11015. value {
  11016. b: true
  11017. }
  11018. }
  11019. }
  11020. node {
  11021. name: "save/Assign_4"
  11022. op: "Assign"
  11023. input: "layer1/weights/ExponentialMovingAverage"
  11024. input: "save/RestoreV2:4"
  11025. attr {
  11026. key: "T"
  11027. value {
  11028. type: DT_FLOAT
  11029. }
  11030. }
  11031. attr {
  11032. key: "_class"
  11033. value {
  11034. list {
  11035. s: "loc:@layer1/weights"
  11036. }
  11037. }
  11038. }
  11039. attr {
  11040. key: "_output_shapes"
  11041. value {
  11042. list {
  11043. shape {
  11044. dim {
  11045. size: 784
  11046. }
  11047. dim {
  11048. size: 500
  11049. }
  11050. }
  11051. }
  11052. }
  11053. }
  11054. attr {
  11055. key: "use_locking"
  11056. value {
  11057. b: true
  11058. }
  11059. }
  11060. attr {
  11061. key: "validate_shape"
  11062. value {
  11063. b: true
  11064. }
  11065. }
  11066. }
  11067. node {
  11068. name: "save/Assign_5"
  11069. op: "Assign"
  11070. input: "layer2/biases"
  11071. input: "save/RestoreV2:5"
  11072. attr {
  11073. key: "T"
  11074. value {
  11075. type: DT_FLOAT
  11076. }
  11077. }
  11078. attr {
  11079. key: "_class"
  11080. value {
  11081. list {
  11082. s: "loc:@layer2/biases"
  11083. }
  11084. }
  11085. }
  11086. attr {
  11087. key: "_output_shapes"
  11088. value {
  11089. list {
  11090. shape {
  11091. dim {
  11092. size: 10
  11093. }
  11094. }
  11095. }
  11096. }
  11097. }
  11098. attr {
  11099. key: "use_locking"
  11100. value {
  11101. b: true
  11102. }
  11103. }
  11104. attr {
  11105. key: "validate_shape"
  11106. value {
  11107. b: true
  11108. }
  11109. }
  11110. }
  11111. node {
  11112. name: "save/Assign_6"
  11113. op: "Assign"
  11114. input: "layer2/biases/ExponentialMovingAverage"
  11115. input: "save/RestoreV2:6"
  11116. attr {
  11117. key: "T"
  11118. value {
  11119. type: DT_FLOAT
  11120. }
  11121. }
  11122. attr {
  11123. key: "_class"
  11124. value {
  11125. list {
  11126. s: "loc:@layer2/biases"
  11127. }
  11128. }
  11129. }
  11130. attr {
  11131. key: "_output_shapes"
  11132. value {
  11133. list {
  11134. shape {
  11135. dim {
  11136. size: 10
  11137. }
  11138. }
  11139. }
  11140. }
  11141. }
  11142. attr {
  11143. key: "use_locking"
  11144. value {
  11145. b: true
  11146. }
  11147. }
  11148. attr {
  11149. key: "validate_shape"
  11150. value {
  11151. b: true
  11152. }
  11153. }
  11154. }
  11155. node {
  11156. name: "save/Assign_7"
  11157. op: "Assign"
  11158. input: "layer2/weights"
  11159. input: "save/RestoreV2:7"
  11160. attr {
  11161. key: "T"
  11162. value {
  11163. type: DT_FLOAT
  11164. }
  11165. }
  11166. attr {
  11167. key: "_class"
  11168. value {
  11169. list {
  11170. s: "loc:@layer2/weights"
  11171. }
  11172. }
  11173. }
  11174. attr {
  11175. key: "_output_shapes"
  11176. value {
  11177. list {
  11178. shape {
  11179. dim {
  11180. size: 500
  11181. }
  11182. dim {
  11183. size: 10
  11184. }
  11185. }
  11186. }
  11187. }
  11188. }
  11189. attr {
  11190. key: "use_locking"
  11191. value {
  11192. b: true
  11193. }
  11194. }
  11195. attr {
  11196. key: "validate_shape"
  11197. value {
  11198. b: true
  11199. }
  11200. }
  11201. }
  11202. node {
  11203. name: "save/Assign_8"
  11204. op: "Assign"
  11205. input: "layer2/weights/ExponentialMovingAverage"
  11206. input: "save/RestoreV2:8"
  11207. attr {
  11208. key: "T"
  11209. value {
  11210. type: DT_FLOAT
  11211. }
  11212. }
  11213. attr {
  11214. key: "_class"
  11215. value {
  11216. list {
  11217. s: "loc:@layer2/weights"
  11218. }
  11219. }
  11220. }
  11221. attr {
  11222. key: "_output_shapes"
  11223. value {
  11224. list {
  11225. shape {
  11226. dim {
  11227. size: 500
  11228. }
  11229. dim {
  11230. size: 10
  11231. }
  11232. }
  11233. }
  11234. }
  11235. }
  11236. attr {
  11237. key: "use_locking"
  11238. value {
  11239. b: true
  11240. }
  11241. }
  11242. attr {
  11243. key: "validate_shape"
  11244. value {
  11245. b: true
  11246. }
  11247. }
  11248. }
  11249. node {
  11250. name: "save/restore_all"
  11251. op: "NoOp"
  11252. input: "^save/Assign"
  11253. input: "^save/Assign_1"
  11254. input: "^save/Assign_2"
  11255. input: "^save/Assign_3"
  11256. input: "^save/Assign_4"
  11257. input: "^save/Assign_5"
  11258. input: "^save/Assign_6"
  11259. input: "^save/Assign_7"
  11260. input: "^save/Assign_8"
  11261. }
  11262. versions {
  11263. producer: 26
  11264. }
  11265. }
  11266. saver_def {
  11267. filename_tensor_name: "save/Const:0"
  11268. save_tensor_name: "save/control_dependency:0"
  11269. restore_op_name: "save/restore_all"
  11270. max_to_keep: 5
  11271. keep_checkpoint_every_n_hours: 10000.0
  11272. version: V2
  11273. }
  11274. collection_def {
  11275. key: "cond_context"
  11276. value {
  11277. bytes_list {
  11278. value: "\n\016cond/cond_text\022\016cond/pred_id:0\032\017cond/switch_t:0 \001*\302\001\n\016cond/pred_id:0\n\022cond/read/Switch:1\n\013cond/read:0\n\017cond/switch_t:0\n\020layer1/weights:0\022 \n\016cond/pred_id:0\022\016cond/pred_id:0\022&\n\020layer1/weights:0\022\022cond/read/Switch:1\022\"\n\017cond/switch_t:0\022\017cond/switch_t:0"
  11279. value: "\n\020cond/cond_text_1\022\016cond/pred_id:0\032\017cond/switch_f:0*\372\001\n\017cond/Switch_1:0\n\017cond/Switch_1:1\n\016cond/pred_id:0\n\017cond/switch_f:0\n-layer1/weights/Initializer/truncated_normal:0\022\"\n\017cond/switch_f:0\022\017cond/switch_f:0\022 \n\016cond/pred_id:0\022\016cond/pred_id:0\022@\n-layer1/weights/Initializer/truncated_normal:0\022\017cond/Switch_1:0"
  11280. value: "\n6layer1/weights/ExponentialMovingAverage/cond/cond_text\0226layer1/weights/ExponentialMovingAverage/cond/pred_id:0\0327layer1/weights/ExponentialMovingAverage/cond/switch_t:0 \001*\252\004\n6layer1/weights/ExponentialMovingAverage/cond/pred_id:0\n:layer1/weights/ExponentialMovingAverage/cond/read/Switch:1\n3layer1/weights/ExponentialMovingAverage/cond/read:0\n7layer1/weights/ExponentialMovingAverage/cond/switch_t:0\n\020layer1/weights:0\022p\n6layer1/weights/ExponentialMovingAverage/cond/pred_id:0\0226layer1/weights/ExponentialMovingAverage/cond/pred_id:0\022r\n7layer1/weights/ExponentialMovingAverage/cond/switch_t:0\0227layer1/weights/ExponentialMovingAverage/cond/switch_t:0\022N\n\020layer1/weights:0\022:layer1/weights/ExponentialMovingAverage/cond/read/Switch:1"
  11281. value: "\n8layer1/weights/ExponentialMovingAverage/cond/cond_text_1\0226layer1/weights/ExponentialMovingAverage/cond/pred_id:0\0327layer1/weights/ExponentialMovingAverage/cond/switch_f:0*\342\004\n7layer1/weights/ExponentialMovingAverage/cond/Switch_1:0\n7layer1/weights/ExponentialMovingAverage/cond/Switch_1:1\n6layer1/weights/ExponentialMovingAverage/cond/pred_id:0\n7layer1/weights/ExponentialMovingAverage/cond/switch_f:0\n-layer1/weights/Initializer/truncated_normal:0\022r\n7layer1/weights/ExponentialMovingAverage/cond/switch_f:0\0227layer1/weights/ExponentialMovingAverage/cond/switch_f:0\022p\n6layer1/weights/ExponentialMovingAverage/cond/pred_id:0\0226layer1/weights/ExponentialMovingAverage/cond/pred_id:0\022h\n-layer1/weights/Initializer/truncated_normal:0\0227layer1/weights/ExponentialMovingAverage/cond/Switch_1:0"
  11282. value: "\n\020cond_1/cond_text\022\020cond_1/pred_id:0\032\021cond_1/switch_t:0 \001*\322\001\n\020cond_1/pred_id:0\n\024cond_1/read/Switch:1\n\rcond_1/read:0\n\021cond_1/switch_t:0\n\017layer1/biases:0\022$\n\020cond_1/pred_id:0\022\020cond_1/pred_id:0\022\'\n\017layer1/biases:0\022\024cond_1/read/Switch:1\022&\n\021cond_1/switch_t:0\022\021cond_1/switch_t:0"
  11283. value: "\n\022cond_1/cond_text_1\022\020cond_1/pred_id:0\032\021cond_1/switch_f:0*\364\001\n\021cond_1/Switch_1:0\n\021cond_1/Switch_1:1\n\020cond_1/pred_id:0\n\021cond_1/switch_f:0\n!layer1/biases/Initializer/Const:0\022&\n\021cond_1/switch_f:0\022\021cond_1/switch_f:0\022$\n\020cond_1/pred_id:0\022\020cond_1/pred_id:0\0226\n!layer1/biases/Initializer/Const:0\022\021cond_1/Switch_1:0"
  11284. value: "\n5layer1/biases/ExponentialMovingAverage/cond/cond_text\0225layer1/biases/ExponentialMovingAverage/cond/pred_id:0\0326layer1/biases/ExponentialMovingAverage/cond/switch_t:0 \001*\237\004\n5layer1/biases/ExponentialMovingAverage/cond/pred_id:0\n9layer1/biases/ExponentialMovingAverage/cond/read/Switch:1\n2layer1/biases/ExponentialMovingAverage/cond/read:0\n6layer1/biases/ExponentialMovingAverage/cond/switch_t:0\n\017layer1/biases:0\022L\n\017layer1/biases:0\0229layer1/biases/ExponentialMovingAverage/cond/read/Switch:1\022p\n6layer1/biases/ExponentialMovingAverage/cond/switch_t:0\0226layer1/biases/ExponentialMovingAverage/cond/switch_t:0\022n\n5layer1/biases/ExponentialMovingAverage/cond/pred_id:0\0225layer1/biases/ExponentialMovingAverage/cond/pred_id:0"
  11285. value: "\n7layer1/biases/ExponentialMovingAverage/cond/cond_text_1\0225layer1/biases/ExponentialMovingAverage/cond/pred_id:0\0326layer1/biases/ExponentialMovingAverage/cond/switch_f:0*\301\004\n6layer1/biases/ExponentialMovingAverage/cond/Switch_1:0\n6layer1/biases/ExponentialMovingAverage/cond/Switch_1:1\n5layer1/biases/ExponentialMovingAverage/cond/pred_id:0\n6layer1/biases/ExponentialMovingAverage/cond/switch_f:0\n!layer1/biases/Initializer/Const:0\022n\n5layer1/biases/ExponentialMovingAverage/cond/pred_id:0\0225layer1/biases/ExponentialMovingAverage/cond/pred_id:0\022p\n6layer1/biases/ExponentialMovingAverage/cond/switch_f:0\0226layer1/biases/ExponentialMovingAverage/cond/switch_f:0\022[\n!layer1/biases/Initializer/Const:0\0226layer1/biases/ExponentialMovingAverage/cond/Switch_1:0"
  11286. value: "\n\020cond_2/cond_text\022\020cond_2/pred_id:0\032\021cond_2/switch_t:0 \001*\324\001\n\020cond_2/pred_id:0\n\024cond_2/read/Switch:1\n\rcond_2/read:0\n\021cond_2/switch_t:0\n\020layer2/weights:0\022$\n\020cond_2/pred_id:0\022\020cond_2/pred_id:0\022&\n\021cond_2/switch_t:0\022\021cond_2/switch_t:0\022(\n\020layer2/weights:0\022\024cond_2/read/Switch:1"
  11287. value: "\n\022cond_2/cond_text_1\022\020cond_2/pred_id:0\032\021cond_2/switch_f:0*\214\002\n\021cond_2/Switch_1:0\n\021cond_2/Switch_1:1\n\020cond_2/pred_id:0\n\021cond_2/switch_f:0\n-layer2/weights/Initializer/truncated_normal:0\022&\n\021cond_2/switch_f:0\022\021cond_2/switch_f:0\022B\n-layer2/weights/Initializer/truncated_normal:0\022\021cond_2/Switch_1:0\022$\n\020cond_2/pred_id:0\022\020cond_2/pred_id:0"
  11288. value: "\n6layer2/weights/ExponentialMovingAverage/cond/cond_text\0226layer2/weights/ExponentialMovingAverage/cond/pred_id:0\0327layer2/weights/ExponentialMovingAverage/cond/switch_t:0 \001*\252\004\n6layer2/weights/ExponentialMovingAverage/cond/pred_id:0\n:layer2/weights/ExponentialMovingAverage/cond/read/Switch:1\n3layer2/weights/ExponentialMovingAverage/cond/read:0\n7layer2/weights/ExponentialMovingAverage/cond/switch_t:0\n\020layer2/weights:0\022r\n7layer2/weights/ExponentialMovingAverage/cond/switch_t:0\0227layer2/weights/ExponentialMovingAverage/cond/switch_t:0\022N\n\020layer2/weights:0\022:layer2/weights/ExponentialMovingAverage/cond/read/Switch:1\022p\n6layer2/weights/ExponentialMovingAverage/cond/pred_id:0\0226layer2/weights/ExponentialMovingAverage/cond/pred_id:0"
  11289. value: "\n8layer2/weights/ExponentialMovingAverage/cond/cond_text_1\0226layer2/weights/ExponentialMovingAverage/cond/pred_id:0\0327layer2/weights/ExponentialMovingAverage/cond/switch_f:0*\342\004\n7layer2/weights/ExponentialMovingAverage/cond/Switch_1:0\n7layer2/weights/ExponentialMovingAverage/cond/Switch_1:1\n6layer2/weights/ExponentialMovingAverage/cond/pred_id:0\n7layer2/weights/ExponentialMovingAverage/cond/switch_f:0\n-layer2/weights/Initializer/truncated_normal:0\022h\n-layer2/weights/Initializer/truncated_normal:0\0227layer2/weights/ExponentialMovingAverage/cond/Switch_1:0\022p\n6layer2/weights/ExponentialMovingAverage/cond/pred_id:0\0226layer2/weights/ExponentialMovingAverage/cond/pred_id:0\022r\n7layer2/weights/ExponentialMovingAverage/cond/switch_f:0\0227layer2/weights/ExponentialMovingAverage/cond/switch_f:0"
  11290. value: "\n\020cond_3/cond_text\022\020cond_3/pred_id:0\032\021cond_3/switch_t:0 \001*\322\001\n\020cond_3/pred_id:0\n\024cond_3/read/Switch:1\n\rcond_3/read:0\n\021cond_3/switch_t:0\n\017layer2/biases:0\022$\n\020cond_3/pred_id:0\022\020cond_3/pred_id:0\022\'\n\017layer2/biases:0\022\024cond_3/read/Switch:1\022&\n\021cond_3/switch_t:0\022\021cond_3/switch_t:0"
  11291. value: "\n\022cond_3/cond_text_1\022\020cond_3/pred_id:0\032\021cond_3/switch_f:0*\364\001\n\021cond_3/Switch_1:0\n\021cond_3/Switch_1:1\n\020cond_3/pred_id:0\n\021cond_3/switch_f:0\n!layer2/biases/Initializer/Const:0\0226\n!layer2/biases/Initializer/Const:0\022\021cond_3/Switch_1:0\022$\n\020cond_3/pred_id:0\022\020cond_3/pred_id:0\022&\n\021cond_3/switch_f:0\022\021cond_3/switch_f:0"
  11292. value: "\n5layer2/biases/ExponentialMovingAverage/cond/cond_text\0225layer2/biases/ExponentialMovingAverage/cond/pred_id:0\0326layer2/biases/ExponentialMovingAverage/cond/switch_t:0 \001*\237\004\n5layer2/biases/ExponentialMovingAverage/cond/pred_id:0\n9layer2/biases/ExponentialMovingAverage/cond/read/Switch:1\n2layer2/biases/ExponentialMovingAverage/cond/read:0\n6layer2/biases/ExponentialMovingAverage/cond/switch_t:0\n\017layer2/biases:0\022p\n6layer2/biases/ExponentialMovingAverage/cond/switch_t:0\0226layer2/biases/ExponentialMovingAverage/cond/switch_t:0\022L\n\017layer2/biases:0\0229layer2/biases/ExponentialMovingAverage/cond/read/Switch:1\022n\n5layer2/biases/ExponentialMovingAverage/cond/pred_id:0\0225layer2/biases/ExponentialMovingAverage/cond/pred_id:0"
  11293. value: "\n7layer2/biases/ExponentialMovingAverage/cond/cond_text_1\0225layer2/biases/ExponentialMovingAverage/cond/pred_id:0\0326layer2/biases/ExponentialMovingAverage/cond/switch_f:0*\301\004\n6layer2/biases/ExponentialMovingAverage/cond/Switch_1:0\n6layer2/biases/ExponentialMovingAverage/cond/Switch_1:1\n5layer2/biases/ExponentialMovingAverage/cond/pred_id:0\n6layer2/biases/ExponentialMovingAverage/cond/switch_f:0\n!layer2/biases/Initializer/Const:0\022n\n5layer2/biases/ExponentialMovingAverage/cond/pred_id:0\0225layer2/biases/ExponentialMovingAverage/cond/pred_id:0\022[\n!layer2/biases/Initializer/Const:0\0226layer2/biases/ExponentialMovingAverage/cond/Switch_1:0\022p\n6layer2/biases/ExponentialMovingAverage/cond/switch_f:0\0226layer2/biases/ExponentialMovingAverage/cond/switch_f:0"
  11294. }
  11295. }
  11296. }
  11297. collection_def {
  11298. key: "moving_average_variables"
  11299. value {
  11300. bytes_list {
  11301. value: "\n\020layer1/weights:0\022\025layer1/weights/Assign\032\025layer1/weights/read:02-layer1/weights/Initializer/truncated_normal:0"
  11302. value: "\n\017layer1/biases:0\022\024layer1/biases/Assign\032\024layer1/biases/read:02!layer1/biases/Initializer/Const:0"
  11303. value: "\n\020layer2/weights:0\022\025layer2/weights/Assign\032\025layer2/weights/read:02-layer2/weights/Initializer/truncated_normal:0"
  11304. value: "\n\017layer2/biases:0\022\024layer2/biases/Assign\032\024layer2/biases/read:02!layer2/biases/Initializer/Const:0"
  11305. }
  11306. }
  11307. }
  11308. collection_def {
  11309. key: "train_op"
  11310. value {
  11311. node_list {
  11312. value: "GradientDescent"
  11313. }
  11314. }
  11315. }
  11316. collection_def {
  11317. key: "trainable_variables"
  11318. value {
  11319. bytes_list {
  11320. value: "\n\020layer1/weights:0\022\025layer1/weights/Assign\032\025layer1/weights/read:02-layer1/weights/Initializer/truncated_normal:0"
  11321. value: "\n\017layer1/biases:0\022\024layer1/biases/Assign\032\024layer1/biases/read:02!layer1/biases/Initializer/Const:0"
  11322. value: "\n\020layer2/weights:0\022\025layer2/weights/Assign\032\025layer2/weights/read:02-layer2/weights/Initializer/truncated_normal:0"
  11323. value: "\n\017layer2/biases:0\022\024layer2/biases/Assign\032\024layer2/biases/read:02!layer2/biases/Initializer/Const:0"
  11324. }
  11325. }
  11326. }
  11327. collection_def {
  11328. key: "variables"
  11329. value {
  11330. bytes_list {
  11331. value: "\n\020layer1/weights:0\022\025layer1/weights/Assign\032\025layer1/weights/read:02-layer1/weights/Initializer/truncated_normal:0"
  11332. value: "\n\017layer1/biases:0\022\024layer1/biases/Assign\032\024layer1/biases/read:02!layer1/biases/Initializer/Const:0"
  11333. value: "\n\020layer2/weights:0\022\025layer2/weights/Assign\032\025layer2/weights/read:02-layer2/weights/Initializer/truncated_normal:0"
  11334. value: "\n\017layer2/biases:0\022\024layer2/biases/Assign\032\024layer2/biases/read:02!layer2/biases/Initializer/Const:0"
  11335. value: "\n\nVariable:0\022\017Variable/Assign\032\017Variable/read:02\030Variable/initial_value:0"
  11336. value: "\n)layer1/weights/ExponentialMovingAverage:0\022.layer1/weights/ExponentialMovingAverage/Assign\032.layer1/weights/ExponentialMovingAverage/read:02\014cond/Merge:0"
  11337. value: "\n(layer1/biases/ExponentialMovingAverage:0\022-layer1/biases/ExponentialMovingAverage/Assign\032-layer1/biases/ExponentialMovingAverage/read:02\016cond_1/Merge:0"
  11338. value: "\n)layer2/weights/ExponentialMovingAverage:0\022.layer2/weights/ExponentialMovingAverage/Assign\032.layer2/weights/ExponentialMovingAverage/read:02\016cond_2/Merge:0"
  11339. value: "\n(layer2/biases/ExponentialMovingAverage:0\022-layer2/biases/ExponentialMovingAverage/Assign\032-layer2/biases/ExponentialMovingAverage/read:02\016cond_3/Merge:0"
  11340. }
  11341. }
  11342. }