diff --git a/.gitignore b/.gitignore
index 7540a95..df50c22 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,12 @@ memristor_utils_g_extradim.py
memristor_utils_g_posneg.py
*.log
*.pdf
+/main.py
+*.h5
+*.pkl
+*.csv
+*.zip
+*.mat
+*.pickle
+*.xlsx
+*.~lock*
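The added patterns keep generated artifacts out of version control: model checkpoints (*.h5), pickled histories (*.pkl, *.pickle), datasets (*.mat, *.csv, *.zip), spreadsheets (*.xlsx), and LibreOffice lock files. A minimal sketch, in Python, to confirm the rules match the files the training scripts actually write (output_model.h5, history_output_model.pkl, accuracy.csv), assuming it runs inside the repository checkout:

    # Hypothetical check of the new .gitignore rules via `git check-ignore -v`.
    import subprocess

    for name in ["output_model.h5", "history_output_model.pkl", "accuracy.csv"]:
        result = subprocess.run(["git", "check-ignore", "-v", name],
                                capture_output=True, text=True)
        # check-ignore prints "source:line:pattern<TAB>path" for ignored names
        print(result.stdout.strip() or f"{name} is NOT ignored")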
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..f288702
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
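The "How to Apply These Terms" section above asks that each source file carry the copyright line and a pointer to the full notice. A minimal sketch of such a header for a hypothetical Python module in this repository, with the year and author placeholders still to be filled in:

    # Copyright (C) <year> <name of author>
    #
    # This program is free software: you can redistribute it and/or modify
    # it under the terms of the GNU General Public License as published by
    # the Free Software Foundation, either version 3 of the License, or
    # (at your option) any later version.
    #
    # This program is distributed in the hope that it will be useful,
    # but WITHOUT ANY WARRANTY; without even the implied warranty of
    # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    # GNU General Public License for more details.
    #
    # You should have received a copy of the GNU General Public License
    # along with this program. If not, see <https://www.gnu.org/licenses/>.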
diff --git a/MNIST/Train.py b/MNIST/Train.py
deleted file mode 100755
index 8a8b846..0000000
--- a/MNIST/Train.py
+++ /dev/null
@@ -1,162 +0,0 @@
-import numpy as np
-import pickle
-import math
-import tensorflow as tf
-from tensorflow import keras
-from tensorflow.keras.datasets import cifar10, mnist
-from tensorflow.keras.optimizers import SGD
-from tensorflow.keras.preprocessing.image import ImageDataGenerator
-import os
-import sys
-sys.path.insert(0, '..')
-from model_architectures import get_model
-
-
-def load_svhn(path_to_dataset):
- import scipy.io as sio
- train = sio.loadmat(path_to_dataset+'/train.mat')
- test = sio.loadmat(path_to_dataset+'/test.mat')
- extra = sio.loadmat(path_to_dataset+'/extra.mat')
- x_train = np.transpose(train['X'], [3, 0, 1, 2])
- y_train = train['y']-1
-
- x_test = np.transpose(test['X'], [3, 0, 1, 2])
- y_test = test['y']-1
-
- x_extra = np.transpose(extra['X'], [3, 0, 1, 2])
- y_extra=extra['y']-1
-
- x_train = np.concatenate((x_train, x_extra), axis=0)
- y_train = np.concatenate((y_train, y_extra), axis=0)
-
- return (x_train, y_train), (x_test, y_test)
-
-def get_examples(dataset):
- if dataset == "MNIST":
- (x_train, y_train), (x_test, y_test) = mnist.load_data()
-        # flatten the 28x28 images into 784-dimensional vectors
- x_train = x_train.reshape(-1,784)
- x_test = x_test.reshape(-1,784)
- use_generator = False
- elif dataset == "CIFAR-10" or dataset == "binarynet":
- use_generator = True
- (x_train, y_train), (x_test, y_test) = cifar10.load_data()
- elif dataset == "SVHN" or dataset == "binarynet-svhn":
- use_generator = True
- (x_train, y_train), (x_test, y_test) = load_svhn('./svhn_data')
- else:
-        raise ValueError("dataset should be one of the following: [MNIST, CIFAR-10, SVHN, binarynet, binarynet-svhn].")
-
- x_train = x_train.astype(np.float32)
- x_test = x_test.astype(np.float32)
- x_train /= 255
- x_test /= 255
-
- print('x_train shape:', x_train.shape)
- print(x_train.shape[0], 'train samples')
- print(x_test.shape[0], 'test samples')
-
- return x_train, y_train, x_test, y_test, use_generator
-
-# learning rate schedule
-def step_decay(epoch):
- initial_lrate = 0.025
- drop = 0.5
- epochs_drop = 50.0
- lrate = initial_lrate * math.pow(drop, math.floor((1+epoch)/epochs_drop))
- return lrate
-
-def train_network(dir_path, dataset, x_train, y_train, num_epochs, use_generator, batch_size, group_idx=None, is_regularized=False):
- os.makedirs(dir_path, exist_ok=True)
-
- model = get_model(dataset, batch_size, group_idx=group_idx, is_regularized=is_regularized)
-
- lr = 0.01
- opt = keras.optimizers.SGD(lr=lr)
- model.compile(
- loss='sparse_categorical_crossentropy',optimizer=opt, metrics=['accuracy'])
-
- weights_path = dir_path + "/output_model.h5"
- cback=keras.callbacks.ModelCheckpoint(
- weights_path, monitor='val_accuracy', save_best_only=True)
-
- if use_generator:
- if dataset=="CIFAR-10" or dataset=="binarynet":
- horizontal_flip=True
- if dataset=="SVHN" or dataset=="binarynet-svhn":
- horizontal_flip=False
-
- datagen = ImageDataGenerator(
- width_shift_range=0.15, # randomly shift images horizontally (fraction of total width)
- height_shift_range=0.15, # randomly shift images vertically (fraction of total height)
- horizontal_flip=horizontal_flip) # randomly flip images
-        if keras.__version__[0]=='2':
-            history=model.fit_generator(
-                datagen.flow(x_train, y_train, batch_size=batch_size),
-                steps_per_epoch=x_train.shape[0]//batch_size,
-                epochs=num_epochs,  # Keras 2 uses epochs; fit_generator has no validation_split
-                verbose=2, callbacks=[cback])
-        if keras.__version__[0]=='1':
-            history=model.fit_generator(
-                datagen.flow(x_train, y_train, batch_size=batch_size),
-                samples_per_epoch=x_train.shape[0], nb_epoch=num_epochs,
-                verbose=2, callbacks=[cback])
- else:
- if keras.__version__[0]=='2':
- history=model.fit(x_train, y_train, batch_size=batch_size,validation_split=0.1, verbose=2, epochs=num_epochs,callbacks=[cback])
- if keras.__version__[0]=='1':
- history=model.fit(x_train, y_train,batch_size=batch_size,validation_split=0.1, verbose=2,nb_epoch=num_epochs,callbacks=[cback])
-
- dic={'hard': history.history}
- history_path = dir_path + "/history_output_model.pkl"
- with open(history_path,'wb') as handle:
- pickle.dump(dic, handle)
-
-def evaluate_network(dir_path, dataset, x_test, y_test, batch_size, group_idx=None, is_regularized=True, log_dir_full_path=None):
- weights_path= dir_path + "/output_model.h5"
- model = get_model(dataset, batch_size, group_idx=group_idx, is_regularized=is_regularized, log_dir_full_path=log_dir_full_path)
- model.load_weights(weights_path)
- opt = keras.optimizers.SGD()
- model.compile(loss='sparse_categorical_crossentropy', optimizer=opt, metrics=['accuracy'])
- score = model.evaluate(x_test, y_test, verbose=0, batch_size=batch_size)
- if log_dir_full_path is None:
- print("Test loss was %0.4f, test accuracy was %0.4f"%(score[0], score[1]))
- else:
- log_file_full_path = "{}/accuracy.csv".format(log_dir_full_path)
- open(log_file_full_path, "a").close()
- tf.print(score[1], output_stream="file://{}".format(log_file_full_path))
-
-
-
-dataset = "MNIST"
-path_to_project = "path/to/project"
-Train = True
-Evaluate = True
-if Train:
- batch_size = 100
-else:
- batch_size = 10000
-num_epochs = 1000
-x_train, y_train, x_test, y_test, use_generator = get_examples(dataset)
-
-group_idxs = [0, 2, 1]
-num_training_repeats = 5
-num_inference_repeats = 25
-
-for is_regularized in [True, False]:
- if is_regularized:
- regularized_label = "regularized"
- else:
- regularized_label = "non-regularized"
- for group_idx in group_idxs:
- log_dir_full_path = "{}/MNIST/models/{}/{}/group-{}".format(
- path_to_project, dataset, regularized_label, group_idx)
- for training_repeat_idx in range(num_training_repeats):
- dir_path = "models/{}/{}/group-{}/network-{}".format(
- dataset, regularized_label, group_idx, training_repeat_idx)
- if Train:
- train_network(dir_path, dataset, x_train, y_train, num_epochs, use_generator, batch_size, group_idx=group_idx, is_regularized=is_regularized)
- if Evaluate:
- for inference_repeat_idx in range(num_inference_repeats):
- evaluate_network(dir_path, dataset, x_test, y_test, batch_size, group_idx=group_idx, is_regularized=is_regularized, log_dir_full_path=log_dir_full_path)
-
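The deleted Train.py branched on keras.__version__ to choose between the Keras 1 and Keras 2 spellings of the generator-training call. For reference, a minimal single-path sketch of the same augmented-training step under TensorFlow 2.x Keras, where fit accepts generators directly; datagen, cback, and the hyperparameter names are reused from the script above:

    # Hypothetical TF2-only equivalent of the deleted version-branching logic.
    history = model.fit(
        datagen.flow(x_train, y_train, batch_size=batch_size),  # augmented batches
        steps_per_epoch=x_train.shape[0] // batch_size,         # one pass per epoch
        epochs=num_epochs,
        verbose=2,
        callbacks=[cback],                                      # checkpoint best model
    )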
diff --git a/MNIST/nonlinear_autograd.txt b/MNIST/nonlinear_autograd.txt
deleted file mode 100644
index d58a3e3..0000000
--- a/MNIST/nonlinear_autograd.txt
+++ /dev/null
@@ -1,2005 +0,0 @@
-X_train shape: (60000, 784)
-60000 train samples
-10000 test samples
-Train on 60000 samples, validate on 10000 samples
-Epoch 1/1000
-60000/60000 - 9s - loss: 0.7709 - accuracy: 0.7652 - val_loss: 0.4542 - val_accuracy: 0.8667
-Epoch 2/1000
-60000/60000 - 6s - loss: 0.4109 - accuracy: 0.8801 - val_loss: 0.3558 - val_accuracy: 0.8991
-Epoch 3/1000
-60000/60000 - 6s - loss: 0.3375 - accuracy: 0.9016 - val_loss: 0.3199 - val_accuracy: 0.9029
-Epoch 4/1000
-60000/60000 - 6s - loss: 0.2981 - accuracy: 0.9125 - val_loss: 0.2970 - val_accuracy: 0.9117
-Epoch 5/1000
-60000/60000 - 6s - loss: 0.2747 - accuracy: 0.9191 - val_loss: 0.2739 - val_accuracy: 0.9171
-Epoch 6/1000
-60000/60000 - 6s - loss: 0.2559 - accuracy: 0.9245 - val_loss: 0.2680 - val_accuracy: 0.9189
-Epoch 7/1000
-60000/60000 - 6s - loss: 0.2403 - accuracy: 0.9283 - val_loss: 0.2660 - val_accuracy: 0.9202
-Epoch 8/1000
-60000/60000 - 6s - loss: 0.2273 - accuracy: 0.9327 - val_loss: 0.2586 - val_accuracy: 0.9200
-Epoch 9/1000
-60000/60000 - 6s - loss: 0.2156 - accuracy: 0.9370 - val_loss: 0.2522 - val_accuracy: 0.9237
-Epoch 10/1000
-60000/60000 - 6s - loss: 0.2059 - accuracy: 0.9388 - val_loss: 0.2586 - val_accuracy: 0.9228
-Epoch 11/1000
-60000/60000 - 6s - loss: 0.1988 - accuracy: 0.9414 - val_loss: 0.2469 - val_accuracy: 0.9263
-Epoch 12/1000
-60000/60000 - 6s - loss: 0.1920 - accuracy: 0.9438 - val_loss: 0.2464 - val_accuracy: 0.9263
-Epoch 13/1000
-60000/60000 - 6s - loss: 0.1854 - accuracy: 0.9453 - val_loss: 0.2431 - val_accuracy: 0.9281
-Epoch 14/1000
-60000/60000 - 6s - loss: 0.1802 - accuracy: 0.9469 - val_loss: 0.2466 - val_accuracy: 0.9252
-Epoch 15/1000
-60000/60000 - 6s - loss: 0.1742 - accuracy: 0.9486 - val_loss: 0.2430 - val_accuracy: 0.9294
-Epoch 16/1000
-60000/60000 - 6s - loss: 0.1711 - accuracy: 0.9494 - val_loss: 0.2393 - val_accuracy: 0.9300
-Epoch 17/1000
-60000/60000 - 6s - loss: 0.1670 - accuracy: 0.9502 - val_loss: 0.2432 - val_accuracy: 0.9287
-Epoch 18/1000
-60000/60000 - 6s - loss: 0.1619 - accuracy: 0.9522 - val_loss: 0.2384 - val_accuracy: 0.9324
-Epoch 19/1000
-60000/60000 - 6s - loss: 0.1577 - accuracy: 0.9532 - val_loss: 0.2420 - val_accuracy: 0.9298
-Epoch 20/1000
-60000/60000 - 6s - loss: 0.1533 - accuracy: 0.9552 - val_loss: 0.2388 - val_accuracy: 0.9304
-Epoch 21/1000
-60000/60000 - 6s - loss: 0.1508 - accuracy: 0.9551 - val_loss: 0.2442 - val_accuracy: 0.9279
-Epoch 22/1000
-60000/60000 - 6s - loss: 0.1477 - accuracy: 0.9562 - val_loss: 0.2417 - val_accuracy: 0.9285
-Epoch 23/1000
-60000/60000 - 6s - loss: 0.1457 - accuracy: 0.9567 - val_loss: 0.2384 - val_accuracy: 0.9273
-Epoch 24/1000
-60000/60000 - 6s - loss: 0.1429 - accuracy: 0.9579 - val_loss: 0.2403 - val_accuracy: 0.9312
-Epoch 25/1000
-60000/60000 - 6s - loss: 0.1400 - accuracy: 0.9585 - val_loss: 0.2350 - val_accuracy: 0.9310
-Epoch 26/1000
-60000/60000 - 6s - loss: 0.1374 - accuracy: 0.9596 - val_loss: 0.2388 - val_accuracy: 0.9293
-Epoch 27/1000
-60000/60000 - 6s - loss: 0.1342 - accuracy: 0.9603 - val_loss: 0.2398 - val_accuracy: 0.9297
-Epoch 28/1000
-60000/60000 - 6s - loss: 0.1301 - accuracy: 0.9621 - val_loss: 0.2356 - val_accuracy: 0.9310
-Epoch 29/1000
-60000/60000 - 6s - loss: 0.1296 - accuracy: 0.9622 - val_loss: 0.2326 - val_accuracy: 0.9322
-Epoch 30/1000
-60000/60000 - 6s - loss: 0.1269 - accuracy: 0.9629 - val_loss: 0.2450 - val_accuracy: 0.9283
-Epoch 31/1000
-60000/60000 - 6s - loss: 0.1249 - accuracy: 0.9641 - val_loss: 0.2444 - val_accuracy: 0.9281
-Epoch 32/1000
-60000/60000 - 6s - loss: 0.1237 - accuracy: 0.9640 - val_loss: 0.2431 - val_accuracy: 0.9308
-Epoch 33/1000
-60000/60000 - 6s - loss: 0.1227 - accuracy: 0.9644 - val_loss: 0.2406 - val_accuracy: 0.9310
-Epoch 34/1000
-60000/60000 - 6s - loss: 0.1220 - accuracy: 0.9641 - val_loss: 0.2445 - val_accuracy: 0.9302
-Epoch 35/1000
-60000/60000 - 6s - loss: 0.1195 - accuracy: 0.9652 - val_loss: 0.2401 - val_accuracy: 0.9319
-Epoch 36/1000
-60000/60000 - 6s - loss: 0.1181 - accuracy: 0.9658 - val_loss: 0.2423 - val_accuracy: 0.9300
-Epoch 37/1000
-60000/60000 - 6s - loss: 0.1166 - accuracy: 0.9665 - val_loss: 0.2451 - val_accuracy: 0.9294
-Epoch 38/1000
-60000/60000 - 6s - loss: 0.1152 - accuracy: 0.9665 - val_loss: 0.2485 - val_accuracy: 0.9286
-Epoch 39/1000
-60000/60000 - 6s - loss: 0.1123 - accuracy: 0.9673 - val_loss: 0.2440 - val_accuracy: 0.9290
-Epoch 40/1000
-60000/60000 - 6s - loss: 0.1121 - accuracy: 0.9678 - val_loss: 0.2479 - val_accuracy: 0.9301
-Epoch 41/1000
-60000/60000 - 6s - loss: 0.1095 - accuracy: 0.9686 - val_loss: 0.2423 - val_accuracy: 0.9298
-Epoch 42/1000
-60000/60000 - 6s - loss: 0.1086 - accuracy: 0.9689 - val_loss: 0.2558 - val_accuracy: 0.9298
-Epoch 43/1000
-60000/60000 - 6s - loss: 0.1093 - accuracy: 0.9687 - val_loss: 0.2535 - val_accuracy: 0.9285
-Epoch 44/1000
-60000/60000 - 6s - loss: 0.1100 - accuracy: 0.9682 - val_loss: 0.2487 - val_accuracy: 0.9291
-Epoch 45/1000
-60000/60000 - 6s - loss: 0.1062 - accuracy: 0.9694 - val_loss: 0.2486 - val_accuracy: 0.9300
-Epoch 46/1000
-60000/60000 - 6s - loss: 0.1049 - accuracy: 0.9709 - val_loss: 0.2478 - val_accuracy: 0.9310
-Epoch 47/1000
-60000/60000 - 6s - loss: 0.1032 - accuracy: 0.9706 - val_loss: 0.2462 - val_accuracy: 0.9301
-Epoch 48/1000
-60000/60000 - 6s - loss: 0.1025 - accuracy: 0.9714 - val_loss: 0.2496 - val_accuracy: 0.9292
-Epoch 49/1000
-60000/60000 - 6s - loss: 0.1007 - accuracy: 0.9719 - val_loss: 0.2462 - val_accuracy: 0.9316
-Epoch 50/1000
-60000/60000 - 6s - loss: 0.0996 - accuracy: 0.9723 - val_loss: 0.2511 - val_accuracy: 0.9300
-Epoch 51/1000
-60000/60000 - 6s - loss: 0.0986 - accuracy: 0.9725 - val_loss: 0.2544 - val_accuracy: 0.9304
-Epoch 52/1000
-60000/60000 - 6s - loss: 0.0998 - accuracy: 0.9721 - val_loss: 0.2481 - val_accuracy: 0.9321
-Epoch 53/1000
-60000/60000 - 6s - loss: 0.0988 - accuracy: 0.9726 - val_loss: 0.2531 - val_accuracy: 0.9288
-Epoch 54/1000
-60000/60000 - 6s - loss: 0.0973 - accuracy: 0.9733 - val_loss: 0.2497 - val_accuracy: 0.9310
-Epoch 55/1000
-60000/60000 - 6s - loss: 0.0988 - accuracy: 0.9724 - val_loss: 0.2524 - val_accuracy: 0.9285
-Epoch 56/1000
-60000/60000 - 6s - loss: 0.0954 - accuracy: 0.9735 - val_loss: 0.2546 - val_accuracy: 0.9313
-Epoch 57/1000
-60000/60000 - 6s - loss: 0.0943 - accuracy: 0.9737 - val_loss: 0.2547 - val_accuracy: 0.9304
-Epoch 58/1000
-60000/60000 - 6s - loss: 0.0953 - accuracy: 0.9732 - val_loss: 0.2540 - val_accuracy: 0.9289
-Epoch 59/1000
-60000/60000 - 6s - loss: 0.0954 - accuracy: 0.9737 - val_loss: 0.2603 - val_accuracy: 0.9290
-Epoch 60/1000
-60000/60000 - 6s - loss: 0.0931 - accuracy: 0.9743 - val_loss: 0.2615 - val_accuracy: 0.9287
-Epoch 61/1000
-60000/60000 - 6s - loss: 0.0928 - accuracy: 0.9743 - val_loss: 0.2574 - val_accuracy: 0.9305
-Epoch 62/1000
-60000/60000 - 6s - loss: 0.0905 - accuracy: 0.9754 - val_loss: 0.2611 - val_accuracy: 0.9290
-Epoch 63/1000
-60000/60000 - 6s - loss: 0.0905 - accuracy: 0.9752 - val_loss: 0.2633 - val_accuracy: 0.9294
-Epoch 64/1000
-60000/60000 - 6s - loss: 0.0905 - accuracy: 0.9751 - val_loss: 0.2626 - val_accuracy: 0.9288
-Epoch 65/1000
-60000/60000 - 6s - loss: 0.0891 - accuracy: 0.9760 - val_loss: 0.2588 - val_accuracy: 0.9283
-Epoch 66/1000
-60000/60000 - 6s - loss: 0.0896 - accuracy: 0.9758 - val_loss: 0.2612 - val_accuracy: 0.9305
-Epoch 67/1000
-60000/60000 - 6s - loss: 0.0888 - accuracy: 0.9759 - val_loss: 0.2625 - val_accuracy: 0.9308
-Epoch 68/1000
-60000/60000 - 6s - loss: 0.0881 - accuracy: 0.9759 - val_loss: 0.2648 - val_accuracy: 0.9307
-Epoch 69/1000
-60000/60000 - 6s - loss: 0.0876 - accuracy: 0.9764 - val_loss: 0.2605 - val_accuracy: 0.9291
-Epoch 70/1000
-60000/60000 - 6s - loss: 0.0872 - accuracy: 0.9765 - val_loss: 0.2620 - val_accuracy: 0.9288
-Epoch 71/1000
-60000/60000 - 6s - loss: 0.0871 - accuracy: 0.9767 - val_loss: 0.2585 - val_accuracy: 0.9295
-Epoch 72/1000
-60000/60000 - 6s - loss: 0.0878 - accuracy: 0.9762 - val_loss: 0.2631 - val_accuracy: 0.9288
-Epoch 73/1000
-60000/60000 - 6s - loss: 0.0862 - accuracy: 0.9771 - val_loss: 0.2665 - val_accuracy: 0.9284
-Epoch 74/1000
-60000/60000 - 6s - loss: 0.0853 - accuracy: 0.9765 - val_loss: 0.2652 - val_accuracy: 0.9278
-Epoch 75/1000
-60000/60000 - 6s - loss: 0.0840 - accuracy: 0.9778 - val_loss: 0.2651 - val_accuracy: 0.9287
-Epoch 76/1000
-60000/60000 - 6s - loss: 0.0838 - accuracy: 0.9779 - val_loss: 0.2603 - val_accuracy: 0.9310
-Epoch 77/1000
-60000/60000 - 6s - loss: 0.0841 - accuracy: 0.9776 - val_loss: 0.2692 - val_accuracy: 0.9260
-Epoch 78/1000
-60000/60000 - 6s - loss: 0.0836 - accuracy: 0.9779 - val_loss: 0.2718 - val_accuracy: 0.9279
-Epoch 79/1000
-60000/60000 - 6s - loss: 0.0837 - accuracy: 0.9780 - val_loss: 0.2644 - val_accuracy: 0.9295
-Epoch 80/1000
-60000/60000 - 6s - loss: 0.0831 - accuracy: 0.9778 - val_loss: 0.2675 - val_accuracy: 0.9283
-Epoch 81/1000
-60000/60000 - 6s - loss: 0.0811 - accuracy: 0.9780 - val_loss: 0.2663 - val_accuracy: 0.9298
-Epoch 82/1000
-60000/60000 - 6s - loss: 0.0819 - accuracy: 0.9782 - val_loss: 0.2658 - val_accuracy: 0.9287
-Epoch 83/1000
-60000/60000 - 6s - loss: 0.0819 - accuracy: 0.9784 - val_loss: 0.2709 - val_accuracy: 0.9277
-Epoch 84/1000
-60000/60000 - 6s - loss: 0.0812 - accuracy: 0.9787 - val_loss: 0.2698 - val_accuracy: 0.9280
-Epoch 85/1000
-60000/60000 - 6s - loss: 0.0810 - accuracy: 0.9790 - val_loss: 0.2667 - val_accuracy: 0.9290
-Epoch 86/1000
-60000/60000 - 6s - loss: 0.0805 - accuracy: 0.9786 - val_loss: 0.2685 - val_accuracy: 0.9280
-Epoch 87/1000
-60000/60000 - 6s - loss: 0.0804 - accuracy: 0.9785 - val_loss: 0.2727 - val_accuracy: 0.9285
-Epoch 88/1000
-60000/60000 - 6s - loss: 0.0779 - accuracy: 0.9798 - val_loss: 0.2718 - val_accuracy: 0.9292
-Epoch 89/1000
-60000/60000 - 6s - loss: 0.0781 - accuracy: 0.9797 - val_loss: 0.2716 - val_accuracy: 0.9287
-Epoch 90/1000
-60000/60000 - 6s - loss: 0.0779 - accuracy: 0.9797 - val_loss: 0.2735 - val_accuracy: 0.9286
-Epoch 91/1000
-60000/60000 - 6s - loss: 0.0783 - accuracy: 0.9796 - val_loss: 0.2752 - val_accuracy: 0.9265
-Epoch 92/1000
-60000/60000 - 6s - loss: 0.0770 - accuracy: 0.9800 - val_loss: 0.2749 - val_accuracy: 0.9291
-Epoch 93/1000
-60000/60000 - 6s - loss: 0.0768 - accuracy: 0.9802 - val_loss: 0.2756 - val_accuracy: 0.9283
-Epoch 94/1000
-60000/60000 - 6s - loss: 0.0783 - accuracy: 0.9792 - val_loss: 0.2721 - val_accuracy: 0.9287
-Epoch 95/1000
-60000/60000 - 6s - loss: 0.0778 - accuracy: 0.9797 - val_loss: 0.2725 - val_accuracy: 0.9304
-Epoch 96/1000
-60000/60000 - 6s - loss: 0.0763 - accuracy: 0.9799 - val_loss: 0.2742 - val_accuracy: 0.9291
-Epoch 97/1000
-60000/60000 - 6s - loss: 0.0772 - accuracy: 0.9798 - val_loss: 0.2783 - val_accuracy: 0.9296
-Epoch 98/1000
-60000/60000 - 6s - loss: 0.0761 - accuracy: 0.9804 - val_loss: 0.2821 - val_accuracy: 0.9276
-Epoch 99/1000
-60000/60000 - 6s - loss: 0.0767 - accuracy: 0.9798 - val_loss: 0.2818 - val_accuracy: 0.9274
-Epoch 100/1000
-60000/60000 - 6s - loss: 0.0759 - accuracy: 0.9801 - val_loss: 0.2786 - val_accuracy: 0.9274
-Epoch 101/1000
-60000/60000 - 6s - loss: 0.0752 - accuracy: 0.9804 - val_loss: 0.2767 - val_accuracy: 0.9285
-Epoch 102/1000
-60000/60000 - 6s - loss: 0.0757 - accuracy: 0.9800 - val_loss: 0.2811 - val_accuracy: 0.9271
-Epoch 103/1000
-60000/60000 - 6s - loss: 0.0742 - accuracy: 0.9808 - val_loss: 0.2777 - val_accuracy: 0.9285
-Epoch 104/1000
-60000/60000 - 6s - loss: 0.0742 - accuracy: 0.9810 - val_loss: 0.2820 - val_accuracy: 0.9290
-Epoch 105/1000
-60000/60000 - 6s - loss: 0.0736 - accuracy: 0.9806 - val_loss: 0.2806 - val_accuracy: 0.9283
-[... Epochs 106-687 elided: 1,164 lines of identical-format per-epoch output in which training loss drifts down from 0.0765 to roughly 0.049, val_loss climbs from 0.2779 to roughly 0.35, and val_accuracy fluctuates between 0.9214 and 0.9298 ...]
-Epoch 688/1000
-60000/60000 - 6s - loss: 0.0514 - accuracy: 0.9871 - val_loss: 0.3502 - val_accuracy: 0.9239
-Epoch 689/1000
-60000/60000 - 6s - loss: 0.0498 - accuracy: 0.9875 - val_loss: 0.3528 - val_accuracy: 0.9230
-Epoch 690/1000
-60000/60000 - 6s - loss: 0.0496 - accuracy: 0.9876 - val_loss: 0.3539 - val_accuracy: 0.9235
-Epoch 691/1000
-60000/60000 - 6s - loss: 0.0489 - accuracy: 0.9878 - val_loss: 0.3524 - val_accuracy: 0.9234
-Epoch 692/1000
-60000/60000 - 6s - loss: 0.0490 - accuracy: 0.9878 - val_loss: 0.3510 - val_accuracy: 0.9230
-Epoch 693/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9879 - val_loss: 0.3532 - val_accuracy: 0.9234
-Epoch 694/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9878 - val_loss: 0.3551 - val_accuracy: 0.9229
-Epoch 695/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9877 - val_loss: 0.3522 - val_accuracy: 0.9229
-Epoch 696/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9878 - val_loss: 0.3537 - val_accuracy: 0.9232
-Epoch 697/1000
-60000/60000 - 6s - loss: 0.0491 - accuracy: 0.9877 - val_loss: 0.3516 - val_accuracy: 0.9237
-Epoch 698/1000
-60000/60000 - 6s - loss: 0.0490 - accuracy: 0.9877 - val_loss: 0.3522 - val_accuracy: 0.9226
-Epoch 699/1000
-60000/60000 - 6s - loss: 0.0488 - accuracy: 0.9879 - val_loss: 0.3537 - val_accuracy: 0.9220
-Epoch 700/1000
-60000/60000 - 6s - loss: 0.0494 - accuracy: 0.9875 - val_loss: 0.3495 - val_accuracy: 0.9238
-Epoch 701/1000
-60000/60000 - 6s - loss: 0.0490 - accuracy: 0.9878 - val_loss: 0.3517 - val_accuracy: 0.9233
-Epoch 702/1000
-60000/60000 - 6s - loss: 0.0488 - accuracy: 0.9877 - val_loss: 0.3526 - val_accuracy: 0.9224
-Epoch 703/1000
-60000/60000 - 6s - loss: 0.0493 - accuracy: 0.9876 - val_loss: 0.3536 - val_accuracy: 0.9228
-Epoch 704/1000
-60000/60000 - 6s - loss: 0.0497 - accuracy: 0.9874 - val_loss: 0.3540 - val_accuracy: 0.9223
-Epoch 705/1000
-60000/60000 - 6s - loss: 0.0496 - accuracy: 0.9876 - val_loss: 0.3544 - val_accuracy: 0.9230
-Epoch 706/1000
-60000/60000 - 6s - loss: 0.0491 - accuracy: 0.9877 - val_loss: 0.3514 - val_accuracy: 0.9226
-Epoch 707/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9878 - val_loss: 0.3526 - val_accuracy: 0.9233
-Epoch 708/1000
-60000/60000 - 6s - loss: 0.0494 - accuracy: 0.9876 - val_loss: 0.3551 - val_accuracy: 0.9211
-Epoch 709/1000
-60000/60000 - 6s - loss: 0.0498 - accuracy: 0.9876 - val_loss: 0.3531 - val_accuracy: 0.9231
-Epoch 710/1000
-60000/60000 - 6s - loss: 0.0488 - accuracy: 0.9879 - val_loss: 0.3539 - val_accuracy: 0.9227
-Epoch 711/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9879 - val_loss: 0.3545 - val_accuracy: 0.9221
-Epoch 712/1000
-60000/60000 - 6s - loss: 0.0492 - accuracy: 0.9877 - val_loss: 0.3519 - val_accuracy: 0.9228
-Epoch 713/1000
-60000/60000 - 6s - loss: 0.0486 - accuracy: 0.9878 - val_loss: 0.3526 - val_accuracy: 0.9236
-Epoch 714/1000
-60000/60000 - 6s - loss: 0.0486 - accuracy: 0.9879 - val_loss: 0.3539 - val_accuracy: 0.9234
-Epoch 715/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9878 - val_loss: 0.3514 - val_accuracy: 0.9230
-Epoch 716/1000
-60000/60000 - 6s - loss: 0.0488 - accuracy: 0.9877 - val_loss: 0.3509 - val_accuracy: 0.9238
-Epoch 717/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9878 - val_loss: 0.3528 - val_accuracy: 0.9236
-Epoch 718/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9878 - val_loss: 0.3521 - val_accuracy: 0.9230
-Epoch 719/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9880 - val_loss: 0.3512 - val_accuracy: 0.9231
-Epoch 720/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9878 - val_loss: 0.3526 - val_accuracy: 0.9229
-Epoch 721/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9877 - val_loss: 0.3558 - val_accuracy: 0.9228
-Epoch 722/1000
-60000/60000 - 6s - loss: 0.0493 - accuracy: 0.9878 - val_loss: 0.3540 - val_accuracy: 0.9233
-Epoch 723/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9879 - val_loss: 0.3538 - val_accuracy: 0.9234
-Epoch 724/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9879 - val_loss: 0.3535 - val_accuracy: 0.9227
-Epoch 725/1000
-60000/60000 - 6s - loss: 0.0497 - accuracy: 0.9875 - val_loss: 0.3508 - val_accuracy: 0.9241
-Epoch 726/1000
-60000/60000 - 6s - loss: 0.0491 - accuracy: 0.9877 - val_loss: 0.3535 - val_accuracy: 0.9231
-Epoch 727/1000
-60000/60000 - 6s - loss: 0.0499 - accuracy: 0.9873 - val_loss: 0.3535 - val_accuracy: 0.9247
-Epoch 728/1000
-60000/60000 - 6s - loss: 0.0516 - accuracy: 0.9868 - val_loss: 0.3582 - val_accuracy: 0.9231
-Epoch 729/1000
-60000/60000 - 6s - loss: 0.0503 - accuracy: 0.9872 - val_loss: 0.3550 - val_accuracy: 0.9235
-Epoch 730/1000
-60000/60000 - 6s - loss: 0.0496 - accuracy: 0.9876 - val_loss: 0.3529 - val_accuracy: 0.9237
-Epoch 731/1000
-60000/60000 - 6s - loss: 0.0489 - accuracy: 0.9879 - val_loss: 0.3538 - val_accuracy: 0.9233
-Epoch 732/1000
-60000/60000 - 6s - loss: 0.0496 - accuracy: 0.9874 - val_loss: 0.3555 - val_accuracy: 0.9230
-Epoch 733/1000
-60000/60000 - 6s - loss: 0.0490 - accuracy: 0.9877 - val_loss: 0.3560 - val_accuracy: 0.9227
-Epoch 734/1000
-60000/60000 - 6s - loss: 0.0494 - accuracy: 0.9876 - val_loss: 0.3557 - val_accuracy: 0.9229
-Epoch 735/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9878 - val_loss: 0.3548 - val_accuracy: 0.9227
-Epoch 736/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9878 - val_loss: 0.3533 - val_accuracy: 0.9235
-Epoch 737/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9878 - val_loss: 0.3524 - val_accuracy: 0.9231
-Epoch 738/1000
-60000/60000 - 6s - loss: 0.0486 - accuracy: 0.9878 - val_loss: 0.3565 - val_accuracy: 0.9233
-Epoch 739/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3555 - val_accuracy: 0.9229
-Epoch 740/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9878 - val_loss: 0.3548 - val_accuracy: 0.9232
-Epoch 741/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9880 - val_loss: 0.3560 - val_accuracy: 0.9220
-Epoch 742/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9878 - val_loss: 0.3575 - val_accuracy: 0.9232
-Epoch 743/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9877 - val_loss: 0.3560 - val_accuracy: 0.9223
-Epoch 744/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9877 - val_loss: 0.3579 - val_accuracy: 0.9222
-Epoch 745/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9878 - val_loss: 0.3564 - val_accuracy: 0.9230
-Epoch 746/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9879 - val_loss: 0.3558 - val_accuracy: 0.9230
-Epoch 747/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9879 - val_loss: 0.3567 - val_accuracy: 0.9232
-Epoch 748/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9879 - val_loss: 0.3555 - val_accuracy: 0.9225
-Epoch 749/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9879 - val_loss: 0.3542 - val_accuracy: 0.9230
-Epoch 750/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3561 - val_accuracy: 0.9224
-Epoch 751/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9879 - val_loss: 0.3569 - val_accuracy: 0.9230
-Epoch 752/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9879 - val_loss: 0.3554 - val_accuracy: 0.9229
-Epoch 753/1000
-60000/60000 - 6s - loss: 0.0503 - accuracy: 0.9872 - val_loss: 0.3572 - val_accuracy: 0.9213
-Epoch 754/1000
-60000/60000 - 6s - loss: 0.0490 - accuracy: 0.9877 - val_loss: 0.3560 - val_accuracy: 0.9228
-Epoch 755/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9878 - val_loss: 0.3576 - val_accuracy: 0.9225
-Epoch 756/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9880 - val_loss: 0.3567 - val_accuracy: 0.9232
-Epoch 757/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9878 - val_loss: 0.3547 - val_accuracy: 0.9224
-Epoch 758/1000
-60000/60000 - 6s - loss: 0.0500 - accuracy: 0.9873 - val_loss: 0.3540 - val_accuracy: 0.9210
-Epoch 759/1000
-60000/60000 - 6s - loss: 0.0499 - accuracy: 0.9874 - val_loss: 0.3569 - val_accuracy: 0.9222
-Epoch 760/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9877 - val_loss: 0.3578 - val_accuracy: 0.9218
-Epoch 761/1000
-60000/60000 - 6s - loss: 0.0489 - accuracy: 0.9876 - val_loss: 0.3578 - val_accuracy: 0.9228
-Epoch 762/1000
-60000/60000 - 6s - loss: 0.0489 - accuracy: 0.9877 - val_loss: 0.3574 - val_accuracy: 0.9224
-Epoch 763/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9879 - val_loss: 0.3558 - val_accuracy: 0.9231
-Epoch 764/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9879 - val_loss: 0.3565 - val_accuracy: 0.9228
-Epoch 765/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9878 - val_loss: 0.3564 - val_accuracy: 0.9226
-Epoch 766/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9877 - val_loss: 0.3564 - val_accuracy: 0.9221
-Epoch 767/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9880 - val_loss: 0.3575 - val_accuracy: 0.9223
-Epoch 768/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9879 - val_loss: 0.3565 - val_accuracy: 0.9229
-Epoch 769/1000
-60000/60000 - 6s - loss: 0.0490 - accuracy: 0.9876 - val_loss: 0.3544 - val_accuracy: 0.9229
-Epoch 770/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9878 - val_loss: 0.3559 - val_accuracy: 0.9233
-Epoch 771/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9877 - val_loss: 0.3541 - val_accuracy: 0.9228
-Epoch 772/1000
-60000/60000 - 6s - loss: 0.0490 - accuracy: 0.9875 - val_loss: 0.3570 - val_accuracy: 0.9226
-Epoch 773/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9877 - val_loss: 0.3549 - val_accuracy: 0.9233
-Epoch 774/1000
-60000/60000 - 6s - loss: 0.0489 - accuracy: 0.9877 - val_loss: 0.3600 - val_accuracy: 0.9224
-Epoch 775/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9877 - val_loss: 0.3582 - val_accuracy: 0.9222
-Epoch 776/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9879 - val_loss: 0.3587 - val_accuracy: 0.9227
-Epoch 777/1000
-60000/60000 - 6s - loss: 0.0486 - accuracy: 0.9877 - val_loss: 0.3586 - val_accuracy: 0.9222
-Epoch 778/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9878 - val_loss: 0.3597 - val_accuracy: 0.9217
-Epoch 779/1000
-60000/60000 - 6s - loss: 0.0507 - accuracy: 0.9871 - val_loss: 0.3596 - val_accuracy: 0.9211
-Epoch 780/1000
-60000/60000 - 6s - loss: 0.0492 - accuracy: 0.9877 - val_loss: 0.3577 - val_accuracy: 0.9216
-Epoch 781/1000
-60000/60000 - 6s - loss: 0.0490 - accuracy: 0.9875 - val_loss: 0.3569 - val_accuracy: 0.9224
-Epoch 782/1000
-60000/60000 - 6s - loss: 0.0486 - accuracy: 0.9876 - val_loss: 0.3570 - val_accuracy: 0.9232
-Epoch 783/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9877 - val_loss: 0.3577 - val_accuracy: 0.9228
-Epoch 784/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9879 - val_loss: 0.3580 - val_accuracy: 0.9224
-Epoch 785/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9880 - val_loss: 0.3572 - val_accuracy: 0.9223
-Epoch 786/1000
-60000/60000 - 6s - loss: 0.0486 - accuracy: 0.9876 - val_loss: 0.3575 - val_accuracy: 0.9234
-Epoch 787/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9878 - val_loss: 0.3574 - val_accuracy: 0.9228
-Epoch 788/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9878 - val_loss: 0.3556 - val_accuracy: 0.9233
-Epoch 789/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9878 - val_loss: 0.3570 - val_accuracy: 0.9228
-Epoch 790/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3570 - val_accuracy: 0.9232
-Epoch 791/1000
-60000/60000 - 6s - loss: 0.0489 - accuracy: 0.9877 - val_loss: 0.3584 - val_accuracy: 0.9224
-Epoch 792/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9878 - val_loss: 0.3575 - val_accuracy: 0.9227
-Epoch 793/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9879 - val_loss: 0.3574 - val_accuracy: 0.9226
-Epoch 794/1000
-60000/60000 - 6s - loss: 0.0488 - accuracy: 0.9877 - val_loss: 0.3581 - val_accuracy: 0.9219
-Epoch 795/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3582 - val_accuracy: 0.9219
-Epoch 796/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9879 - val_loss: 0.3576 - val_accuracy: 0.9224
-Epoch 797/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9879 - val_loss: 0.3572 - val_accuracy: 0.9235
-Epoch 798/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9880 - val_loss: 0.3578 - val_accuracy: 0.9226
-Epoch 799/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9882 - val_loss: 0.3584 - val_accuracy: 0.9223
-Epoch 800/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3595 - val_accuracy: 0.9222
-Epoch 801/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9881 - val_loss: 0.3581 - val_accuracy: 0.9217
-Epoch 802/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9879 - val_loss: 0.3597 - val_accuracy: 0.9233
-Epoch 803/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9877 - val_loss: 0.3590 - val_accuracy: 0.9226
-Epoch 804/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9879 - val_loss: 0.3609 - val_accuracy: 0.9221
-Epoch 805/1000
-60000/60000 - 6s - loss: 0.0486 - accuracy: 0.9877 - val_loss: 0.3583 - val_accuracy: 0.9221
-Epoch 806/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9879 - val_loss: 0.3585 - val_accuracy: 0.9210
-Epoch 807/1000
-60000/60000 - 6s - loss: 0.0486 - accuracy: 0.9877 - val_loss: 0.3591 - val_accuracy: 0.9224
-Epoch 808/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9879 - val_loss: 0.3579 - val_accuracy: 0.9224
-Epoch 809/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9879 - val_loss: 0.3580 - val_accuracy: 0.9219
-Epoch 810/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9880 - val_loss: 0.3590 - val_accuracy: 0.9227
-Epoch 811/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9881 - val_loss: 0.3603 - val_accuracy: 0.9218
-Epoch 812/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3594 - val_accuracy: 0.9230
-Epoch 813/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9879 - val_loss: 0.3589 - val_accuracy: 0.9221
-Epoch 814/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9879 - val_loss: 0.3590 - val_accuracy: 0.9213
-Epoch 815/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9879 - val_loss: 0.3583 - val_accuracy: 0.9218
-Epoch 816/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9881 - val_loss: 0.3612 - val_accuracy: 0.9218
-Epoch 817/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9879 - val_loss: 0.3597 - val_accuracy: 0.9217
-Epoch 818/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9880 - val_loss: 0.3575 - val_accuracy: 0.9228
-Epoch 819/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9880 - val_loss: 0.3585 - val_accuracy: 0.9218
-Epoch 820/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9879 - val_loss: 0.3596 - val_accuracy: 0.9220
-Epoch 821/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3588 - val_accuracy: 0.9215
-Epoch 822/1000
-60000/60000 - 6s - loss: 0.0501 - accuracy: 0.9871 - val_loss: 0.3606 - val_accuracy: 0.9225
-Epoch 823/1000
-60000/60000 - 6s - loss: 0.0494 - accuracy: 0.9873 - val_loss: 0.3574 - val_accuracy: 0.9227
-Epoch 824/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9878 - val_loss: 0.3574 - val_accuracy: 0.9216
-Epoch 825/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9879 - val_loss: 0.3584 - val_accuracy: 0.9223
-Epoch 826/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9880 - val_loss: 0.3587 - val_accuracy: 0.9223
-Epoch 827/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9879 - val_loss: 0.3587 - val_accuracy: 0.9217
-Epoch 828/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9879 - val_loss: 0.3586 - val_accuracy: 0.9227
-Epoch 829/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3580 - val_accuracy: 0.9225
-Epoch 830/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3591 - val_accuracy: 0.9225
-Epoch 831/1000
-60000/60000 - 6s - loss: 0.0496 - accuracy: 0.9874 - val_loss: 0.3617 - val_accuracy: 0.9226
-Epoch 832/1000
-60000/60000 - 6s - loss: 0.0502 - accuracy: 0.9873 - val_loss: 0.3625 - val_accuracy: 0.9220
-Epoch 833/1000
-60000/60000 - 6s - loss: 0.0489 - accuracy: 0.9879 - val_loss: 0.3601 - val_accuracy: 0.9217
-Epoch 834/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9877 - val_loss: 0.3620 - val_accuracy: 0.9218
-Epoch 835/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9879 - val_loss: 0.3591 - val_accuracy: 0.9227
-Epoch 836/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9882 - val_loss: 0.3582 - val_accuracy: 0.9221
-Epoch 837/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9880 - val_loss: 0.3584 - val_accuracy: 0.9228
-Epoch 838/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9880 - val_loss: 0.3582 - val_accuracy: 0.9227
-Epoch 839/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9880 - val_loss: 0.3580 - val_accuracy: 0.9228
-Epoch 840/1000
-60000/60000 - 6s - loss: 0.0488 - accuracy: 0.9877 - val_loss: 0.3598 - val_accuracy: 0.9231
-Epoch 841/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9879 - val_loss: 0.3603 - val_accuracy: 0.9218
-Epoch 842/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9881 - val_loss: 0.3595 - val_accuracy: 0.9221
-Epoch 843/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9879 - val_loss: 0.3590 - val_accuracy: 0.9224
-Epoch 844/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9878 - val_loss: 0.3617 - val_accuracy: 0.9233
-Epoch 845/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3621 - val_accuracy: 0.9221
-Epoch 846/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3605 - val_accuracy: 0.9219
-Epoch 847/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9881 - val_loss: 0.3607 - val_accuracy: 0.9221
-Epoch 848/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3599 - val_accuracy: 0.9227
-Epoch 849/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3596 - val_accuracy: 0.9221
-Epoch 850/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9880 - val_loss: 0.3605 - val_accuracy: 0.9233
-Epoch 851/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9881 - val_loss: 0.3597 - val_accuracy: 0.9225
-Epoch 852/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9878 - val_loss: 0.3602 - val_accuracy: 0.9223
-Epoch 853/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9877 - val_loss: 0.3592 - val_accuracy: 0.9213
-Epoch 854/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9877 - val_loss: 0.3591 - val_accuracy: 0.9243
-Epoch 855/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3604 - val_accuracy: 0.9221
-Epoch 856/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9881 - val_loss: 0.3579 - val_accuracy: 0.9227
-Epoch 857/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3589 - val_accuracy: 0.9220
-Epoch 858/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9880 - val_loss: 0.3607 - val_accuracy: 0.9227
-Epoch 859/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9879 - val_loss: 0.3605 - val_accuracy: 0.9223
-Epoch 860/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9881 - val_loss: 0.3605 - val_accuracy: 0.9231
-Epoch 861/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9880 - val_loss: 0.3608 - val_accuracy: 0.9220
-Epoch 862/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9881 - val_loss: 0.3624 - val_accuracy: 0.9226
-Epoch 863/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3622 - val_accuracy: 0.9220
-Epoch 864/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9879 - val_loss: 0.3607 - val_accuracy: 0.9213
-Epoch 865/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9880 - val_loss: 0.3605 - val_accuracy: 0.9220
-Epoch 866/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9882 - val_loss: 0.3602 - val_accuracy: 0.9217
-Epoch 867/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9880 - val_loss: 0.3611 - val_accuracy: 0.9223
-Epoch 868/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9880 - val_loss: 0.3598 - val_accuracy: 0.9222
-Epoch 869/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3610 - val_accuracy: 0.9213
-Epoch 870/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3596 - val_accuracy: 0.9222
-Epoch 871/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3607 - val_accuracy: 0.9219
-Epoch 872/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9879 - val_loss: 0.3591 - val_accuracy: 0.9227
-Epoch 873/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9879 - val_loss: 0.3609 - val_accuracy: 0.9215
-Epoch 874/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9881 - val_loss: 0.3609 - val_accuracy: 0.9219
-Epoch 875/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9881 - val_loss: 0.3608 - val_accuracy: 0.9212
-Epoch 876/1000
-60000/60000 - 6s - loss: 0.0494 - accuracy: 0.9875 - val_loss: 0.3579 - val_accuracy: 0.9228
-Epoch 877/1000
-60000/60000 - 6s - loss: 0.0488 - accuracy: 0.9876 - val_loss: 0.3603 - val_accuracy: 0.9226
-Epoch 878/1000
-60000/60000 - 6s - loss: 0.0491 - accuracy: 0.9878 - val_loss: 0.3620 - val_accuracy: 0.9215
-Epoch 879/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9880 - val_loss: 0.3623 - val_accuracy: 0.9220
-Epoch 880/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9880 - val_loss: 0.3591 - val_accuracy: 0.9229
-Epoch 881/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9880 - val_loss: 0.3631 - val_accuracy: 0.9217
-Epoch 882/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9879 - val_loss: 0.3585 - val_accuracy: 0.9218
-Epoch 883/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9880 - val_loss: 0.3600 - val_accuracy: 0.9218
-Epoch 884/1000
-60000/60000 - 6s - loss: 0.0473 - accuracy: 0.9881 - val_loss: 0.3605 - val_accuracy: 0.9220
-Epoch 885/1000
-60000/60000 - 6s - loss: 0.0473 - accuracy: 0.9881 - val_loss: 0.3609 - val_accuracy: 0.9218
-Epoch 886/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9881 - val_loss: 0.3608 - val_accuracy: 0.9208
-Epoch 887/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9881 - val_loss: 0.3593 - val_accuracy: 0.9222
-Epoch 888/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9881 - val_loss: 0.3593 - val_accuracy: 0.9225
-Epoch 889/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9881 - val_loss: 0.3596 - val_accuracy: 0.9221
-Epoch 890/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9880 - val_loss: 0.3629 - val_accuracy: 0.9225
-Epoch 891/1000
-60000/60000 - 6s - loss: 0.0490 - accuracy: 0.9876 - val_loss: 0.3597 - val_accuracy: 0.9221
-Epoch 892/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9879 - val_loss: 0.3615 - val_accuracy: 0.9223
-Epoch 893/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3610 - val_accuracy: 0.9218
-Epoch 894/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9881 - val_loss: 0.3621 - val_accuracy: 0.9216
-Epoch 895/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9880 - val_loss: 0.3618 - val_accuracy: 0.9226
-Epoch 896/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9880 - val_loss: 0.3632 - val_accuracy: 0.9221
-Epoch 897/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9882 - val_loss: 0.3621 - val_accuracy: 0.9218
-Epoch 898/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9881 - val_loss: 0.3605 - val_accuracy: 0.9221
-Epoch 899/1000
-60000/60000 - 6s - loss: 0.0488 - accuracy: 0.9877 - val_loss: 0.3630 - val_accuracy: 0.9217
-Epoch 900/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3606 - val_accuracy: 0.9225
-Epoch 901/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3597 - val_accuracy: 0.9224
-Epoch 902/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9880 - val_loss: 0.3633 - val_accuracy: 0.9213
-Epoch 903/1000
-60000/60000 - 6s - loss: 0.0488 - accuracy: 0.9877 - val_loss: 0.3627 - val_accuracy: 0.9214
-Epoch 904/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3624 - val_accuracy: 0.9212
-Epoch 905/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9878 - val_loss: 0.3597 - val_accuracy: 0.9224
-Epoch 906/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3612 - val_accuracy: 0.9219
-Epoch 907/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9882 - val_loss: 0.3623 - val_accuracy: 0.9221
-Epoch 908/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3616 - val_accuracy: 0.9231
-Epoch 909/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3604 - val_accuracy: 0.9229
-Epoch 910/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3606 - val_accuracy: 0.9223
-Epoch 911/1000
-60000/60000 - 6s - loss: 0.0473 - accuracy: 0.9882 - val_loss: 0.3608 - val_accuracy: 0.9229
-Epoch 912/1000
-60000/60000 - 6s - loss: 0.0473 - accuracy: 0.9881 - val_loss: 0.3614 - val_accuracy: 0.9225
-Epoch 913/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9880 - val_loss: 0.3599 - val_accuracy: 0.9218
-Epoch 914/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9881 - val_loss: 0.3611 - val_accuracy: 0.9223
-Epoch 915/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9878 - val_loss: 0.3598 - val_accuracy: 0.9222
-Epoch 916/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9878 - val_loss: 0.3612 - val_accuracy: 0.9211
-Epoch 917/1000
-60000/60000 - 6s - loss: 0.0492 - accuracy: 0.9876 - val_loss: 0.3620 - val_accuracy: 0.9208
-Epoch 918/1000
-60000/60000 - 6s - loss: 0.0491 - accuracy: 0.9876 - val_loss: 0.3646 - val_accuracy: 0.9208
-Epoch 919/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9880 - val_loss: 0.3589 - val_accuracy: 0.9228
-Epoch 920/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9880 - val_loss: 0.3585 - val_accuracy: 0.9222
-Epoch 921/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9881 - val_loss: 0.3594 - val_accuracy: 0.9235
-Epoch 922/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9881 - val_loss: 0.3603 - val_accuracy: 0.9226
-Epoch 923/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9880 - val_loss: 0.3617 - val_accuracy: 0.9221
-Epoch 924/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9879 - val_loss: 0.3623 - val_accuracy: 0.9217
-Epoch 925/1000
-60000/60000 - 6s - loss: 0.0486 - accuracy: 0.9879 - val_loss: 0.3582 - val_accuracy: 0.9222
-Epoch 926/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9880 - val_loss: 0.3598 - val_accuracy: 0.9224
-Epoch 927/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9880 - val_loss: 0.3568 - val_accuracy: 0.9223
-Epoch 928/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3612 - val_accuracy: 0.9218
-Epoch 929/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9881 - val_loss: 0.3616 - val_accuracy: 0.9221
-Epoch 930/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9880 - val_loss: 0.3608 - val_accuracy: 0.9223
-Epoch 931/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3627 - val_accuracy: 0.9216
-Epoch 932/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9881 - val_loss: 0.3607 - val_accuracy: 0.9223
-Epoch 933/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3612 - val_accuracy: 0.9225
-Epoch 934/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9881 - val_loss: 0.3617 - val_accuracy: 0.9219
-Epoch 935/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9880 - val_loss: 0.3611 - val_accuracy: 0.9219
-Epoch 936/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9882 - val_loss: 0.3622 - val_accuracy: 0.9215
-Epoch 937/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9879 - val_loss: 0.3635 - val_accuracy: 0.9218
-Epoch 938/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9880 - val_loss: 0.3612 - val_accuracy: 0.9222
-Epoch 939/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9881 - val_loss: 0.3619 - val_accuracy: 0.9214
-Epoch 940/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3614 - val_accuracy: 0.9218
-Epoch 941/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9881 - val_loss: 0.3621 - val_accuracy: 0.9220
-Epoch 942/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9883 - val_loss: 0.3639 - val_accuracy: 0.9212
-Epoch 943/1000
-60000/60000 - 6s - loss: 0.0473 - accuracy: 0.9882 - val_loss: 0.3616 - val_accuracy: 0.9231
-Epoch 944/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9880 - val_loss: 0.3627 - val_accuracy: 0.9206
-Epoch 945/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9881 - val_loss: 0.3649 - val_accuracy: 0.9219
-Epoch 946/1000
-60000/60000 - 6s - loss: 0.0473 - accuracy: 0.9882 - val_loss: 0.3647 - val_accuracy: 0.9217
-Epoch 947/1000
-60000/60000 - 6s - loss: 0.0471 - accuracy: 0.9882 - val_loss: 0.3641 - val_accuracy: 0.9220
-Epoch 948/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9882 - val_loss: 0.3639 - val_accuracy: 0.9209
-Epoch 949/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9876 - val_loss: 0.3631 - val_accuracy: 0.9213
-Epoch 950/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3633 - val_accuracy: 0.9220
-Epoch 951/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9877 - val_loss: 0.3623 - val_accuracy: 0.9221
-Epoch 952/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9878 - val_loss: 0.3646 - val_accuracy: 0.9208
-Epoch 953/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9880 - val_loss: 0.3649 - val_accuracy: 0.9222
-Epoch 954/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9881 - val_loss: 0.3624 - val_accuracy: 0.9219
-Epoch 955/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3625 - val_accuracy: 0.9212
-Epoch 956/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9879 - val_loss: 0.3643 - val_accuracy: 0.9211
-Epoch 957/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9878 - val_loss: 0.3648 - val_accuracy: 0.9222
-Epoch 958/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3630 - val_accuracy: 0.9213
-Epoch 959/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9882 - val_loss: 0.3627 - val_accuracy: 0.9216
-Epoch 960/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9880 - val_loss: 0.3632 - val_accuracy: 0.9214
-Epoch 961/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9882 - val_loss: 0.3621 - val_accuracy: 0.9210
-Epoch 962/1000
-60000/60000 - 6s - loss: 0.0488 - accuracy: 0.9877 - val_loss: 0.3611 - val_accuracy: 0.9227
-Epoch 963/1000
-60000/60000 - 6s - loss: 0.0492 - accuracy: 0.9876 - val_loss: 0.3618 - val_accuracy: 0.9215
-Epoch 964/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9879 - val_loss: 0.3629 - val_accuracy: 0.9231
-Epoch 965/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9880 - val_loss: 0.3602 - val_accuracy: 0.9217
-Epoch 966/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9879 - val_loss: 0.3643 - val_accuracy: 0.9221
-Epoch 967/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9882 - val_loss: 0.3625 - val_accuracy: 0.9224
-Epoch 968/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9882 - val_loss: 0.3633 - val_accuracy: 0.9220
-Epoch 969/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9882 - val_loss: 0.3633 - val_accuracy: 0.9218
-Epoch 970/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9880 - val_loss: 0.3632 - val_accuracy: 0.9224
-Epoch 971/1000
-60000/60000 - 6s - loss: 0.0485 - accuracy: 0.9878 - val_loss: 0.3629 - val_accuracy: 0.9222
-Epoch 972/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9878 - val_loss: 0.3645 - val_accuracy: 0.9208
-Epoch 973/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9878 - val_loss: 0.3620 - val_accuracy: 0.9213
-Epoch 974/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3639 - val_accuracy: 0.9217
-Epoch 975/1000
-60000/60000 - 6s - loss: 0.0479 - accuracy: 0.9880 - val_loss: 0.3632 - val_accuracy: 0.9219
-Epoch 976/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9879 - val_loss: 0.3640 - val_accuracy: 0.9227
-Epoch 977/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9880 - val_loss: 0.3622 - val_accuracy: 0.9211
-Epoch 978/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9880 - val_loss: 0.3652 - val_accuracy: 0.9221
-Epoch 979/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9879 - val_loss: 0.3631 - val_accuracy: 0.9222
-Epoch 980/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9880 - val_loss: 0.3640 - val_accuracy: 0.9224
-Epoch 981/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9882 - val_loss: 0.3666 - val_accuracy: 0.9208
-Epoch 982/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9876 - val_loss: 0.3658 - val_accuracy: 0.9216
-Epoch 983/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9883 - val_loss: 0.3642 - val_accuracy: 0.9219
-Epoch 984/1000
-60000/60000 - 6s - loss: 0.0472 - accuracy: 0.9882 - val_loss: 0.3649 - val_accuracy: 0.9218
-Epoch 985/1000
-60000/60000 - 6s - loss: 0.0471 - accuracy: 0.9882 - val_loss: 0.3639 - val_accuracy: 0.9215
-Epoch 986/1000
-60000/60000 - 6s - loss: 0.0470 - accuracy: 0.9883 - val_loss: 0.3657 - val_accuracy: 0.9221
-Epoch 987/1000
-60000/60000 - 6s - loss: 0.0470 - accuracy: 0.9883 - val_loss: 0.3638 - val_accuracy: 0.9222
-Epoch 988/1000
-60000/60000 - 6s - loss: 0.0472 - accuracy: 0.9882 - val_loss: 0.3640 - val_accuracy: 0.9214
-Epoch 989/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9879 - val_loss: 0.3640 - val_accuracy: 0.9225
-Epoch 990/1000
-60000/60000 - 6s - loss: 0.0481 - accuracy: 0.9878 - val_loss: 0.3649 - val_accuracy: 0.9224
-Epoch 991/1000
-60000/60000 - 6s - loss: 0.0476 - accuracy: 0.9881 - val_loss: 0.3628 - val_accuracy: 0.9227
-Epoch 992/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9882 - val_loss: 0.3646 - val_accuracy: 0.9228
-Epoch 993/1000
-60000/60000 - 6s - loss: 0.0496 - accuracy: 0.9875 - val_loss: 0.3657 - val_accuracy: 0.9223
-Epoch 994/1000
-60000/60000 - 6s - loss: 0.0484 - accuracy: 0.9879 - val_loss: 0.3606 - val_accuracy: 0.9233
-Epoch 995/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9881 - val_loss: 0.3625 - val_accuracy: 0.9228
-Epoch 996/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9882 - val_loss: 0.3632 - val_accuracy: 0.9222
-Epoch 997/1000
-60000/60000 - 6s - loss: 0.0473 - accuracy: 0.9881 - val_loss: 0.3634 - val_accuracy: 0.9225
-Epoch 998/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9880 - val_loss: 0.3660 - val_accuracy: 0.9220
-Epoch 999/1000
-60000/60000 - 6s - loss: 0.0470 - accuracy: 0.9882 - val_loss: 0.3632 - val_accuracy: 0.9219
-Epoch 1000/1000
-60000/60000 - 6s - loss: 0.0473 - accuracy: 0.9881 - val_loss: 0.3640 - val_accuracy: 0.9218
-Test loss was 0.2384, test accuracy was 0.9324
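For context on these deleted files: the per-epoch lines are standard Keras `verbose=2` training output (one `loss / accuracy / val_loss / val_accuracy` line per epoch), followed by a final test evaluation. Below is a minimal, hypothetical sketch of the kind of run that produces this log format. The repository's actual training script is not included in this diff, so the layer sizes, optimizer, and batch size here are illustrative assumptions only, not the project's real configuration.

import tensorflow as tf

# Load MNIST and flatten to 784-dimensional float vectors, matching the
# "X_train shape: (60000, 784)" header printed in the deleted logs.
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
x_train = x_train.reshape(60000, 784).astype("float32") / 255.0
x_test = x_test.reshape(10000, 784).astype("float32") / 255.0
print("X_train shape:", x_train.shape)
print(x_train.shape[0], "train samples")
print(x_test.shape[0], "test samples")

# Assumed architecture: a small dense classifier. The real model (and any
# memristor-specific synapse handling) lives in the project's own code.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(25, activation="sigmoid", input_shape=(784,)),
    tf.keras.layers.Dense(10, activation="softmax"),
])
model.compile(optimizer=tf.keras.optimizers.SGD(),
              loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])

# verbose=2 prints exactly one epoch-summary line per epoch, which is the
# format seen throughout the deleted log files; the 10000-image test set is
# passed as validation data, matching "validate on 10000 samples".
model.fit(x_train, y_train, epochs=1000, batch_size=100,
          validation_data=(x_test, y_test), verbose=2)

test_loss, test_acc = model.evaluate(x_test, y_test, verbose=0)
print("Test loss was %.4f, test accuracy was %.4f" % (test_loss, test_acc))

Note that in the log above the final val_accuracy (0.9218) differs from the reported test accuracy (0.9324) even though both appear to use the same 10000-sample test set, which suggests the final test pass went through a different inference path than the per-epoch validation; the sketch simply evaluates the trained model directly and does not attempt to reproduce that detail.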
diff --git a/MNIST/nonlinear_overriden.txt b/MNIST/nonlinear_overriden.txt
deleted file mode 100644
index cb685cf..0000000
--- a/MNIST/nonlinear_overriden.txt
+++ /dev/null
@@ -1,2005 +0,0 @@
-X_train shape: (60000, 784)
-60000 train samples
-10000 test samples
-Train on 60000 samples, validate on 10000 samples
-Epoch 1/1000
-60000/60000 - 7s - loss: 1.6067 - accuracy: 0.4892 - val_loss: 1.0555 - val_accuracy: 0.6518
-Epoch 2/1000
-60000/60000 - 5s - loss: 0.9542 - accuracy: 0.6926 - val_loss: 0.8209 - val_accuracy: 0.7388
-Epoch 3/1000
-60000/60000 - 5s - loss: 0.7931 - accuracy: 0.7519 - val_loss: 0.7211 - val_accuracy: 0.7768
-Epoch 4/1000
-60000/60000 - 5s - loss: 0.7119 - accuracy: 0.7799 - val_loss: 0.6585 - val_accuracy: 0.7962
-Epoch 5/1000
-60000/60000 - 5s - loss: 0.6562 - accuracy: 0.7993 - val_loss: 0.6167 - val_accuracy: 0.8100
-Epoch 6/1000
-60000/60000 - 5s - loss: 0.6164 - accuracy: 0.8131 - val_loss: 0.5879 - val_accuracy: 0.8192
-Epoch 7/1000
-60000/60000 - 5s - loss: 0.5865 - accuracy: 0.8229 - val_loss: 0.5626 - val_accuracy: 0.8295
-Epoch 8/1000
-60000/60000 - 5s - loss: 0.5613 - accuracy: 0.8310 - val_loss: 0.5442 - val_accuracy: 0.8345
-Epoch 9/1000
-60000/60000 - 5s - loss: 0.5401 - accuracy: 0.8369 - val_loss: 0.5268 - val_accuracy: 0.8410
-Epoch 10/1000
-60000/60000 - 5s - loss: 0.5233 - accuracy: 0.8430 - val_loss: 0.5132 - val_accuracy: 0.8465
-Epoch 11/1000
-60000/60000 - 5s - loss: 0.5088 - accuracy: 0.8470 - val_loss: 0.5012 - val_accuracy: 0.8513
-Epoch 12/1000
-60000/60000 - 5s - loss: 0.4961 - accuracy: 0.8516 - val_loss: 0.4918 - val_accuracy: 0.8537
-Epoch 13/1000
-60000/60000 - 5s - loss: 0.4834 - accuracy: 0.8553 - val_loss: 0.4824 - val_accuracy: 0.8583
-Epoch 14/1000
-60000/60000 - 5s - loss: 0.4725 - accuracy: 0.8595 - val_loss: 0.4759 - val_accuracy: 0.8608
-Epoch 15/1000
-60000/60000 - 5s - loss: 0.4628 - accuracy: 0.8630 - val_loss: 0.4689 - val_accuracy: 0.8620
-Epoch 16/1000
-60000/60000 - 5s - loss: 0.4538 - accuracy: 0.8656 - val_loss: 0.4608 - val_accuracy: 0.8653
-Epoch 17/1000
-60000/60000 - 5s - loss: 0.4449 - accuracy: 0.8685 - val_loss: 0.4554 - val_accuracy: 0.8664
-Epoch 18/1000
-60000/60000 - 5s - loss: 0.4370 - accuracy: 0.8709 - val_loss: 0.4507 - val_accuracy: 0.8684
-Epoch 19/1000
-60000/60000 - 5s - loss: 0.4301 - accuracy: 0.8733 - val_loss: 0.4446 - val_accuracy: 0.8707
-Epoch 20/1000
-60000/60000 - 5s - loss: 0.4227 - accuracy: 0.8755 - val_loss: 0.4411 - val_accuracy: 0.8708
-Epoch 21/1000
-60000/60000 - 5s - loss: 0.4174 - accuracy: 0.8772 - val_loss: 0.4357 - val_accuracy: 0.8719
-Epoch 22/1000
-60000/60000 - 5s - loss: 0.4105 - accuracy: 0.8791 - val_loss: 0.4326 - val_accuracy: 0.8729
-Epoch 23/1000
-60000/60000 - 5s - loss: 0.4048 - accuracy: 0.8807 - val_loss: 0.4288 - val_accuracy: 0.8753
-Epoch 24/1000
-60000/60000 - 5s - loss: 0.3997 - accuracy: 0.8831 - val_loss: 0.4252 - val_accuracy: 0.8759
-Epoch 25/1000
-60000/60000 - 5s - loss: 0.3945 - accuracy: 0.8845 - val_loss: 0.4199 - val_accuracy: 0.8783
-Epoch 26/1000
-60000/60000 - 5s - loss: 0.3886 - accuracy: 0.8863 - val_loss: 0.4161 - val_accuracy: 0.8801
-Epoch 27/1000
-60000/60000 - 5s - loss: 0.3838 - accuracy: 0.8875 - val_loss: 0.4134 - val_accuracy: 0.8806
-Epoch 28/1000
-60000/60000 - 5s - loss: 0.3792 - accuracy: 0.8889 - val_loss: 0.4093 - val_accuracy: 0.8800
-Epoch 29/1000
-60000/60000 - 5s - loss: 0.3749 - accuracy: 0.8904 - val_loss: 0.4050 - val_accuracy: 0.8824
-Epoch 30/1000
-60000/60000 - 5s - loss: 0.3702 - accuracy: 0.8917 - val_loss: 0.4023 - val_accuracy: 0.8830
-Epoch 31/1000
-60000/60000 - 5s - loss: 0.3662 - accuracy: 0.8929 - val_loss: 0.4001 - val_accuracy: 0.8850
-Epoch 32/1000
-60000/60000 - 5s - loss: 0.3623 - accuracy: 0.8939 - val_loss: 0.3966 - val_accuracy: 0.8855
-Epoch 33/1000
-60000/60000 - 5s - loss: 0.3586 - accuracy: 0.8949 - val_loss: 0.3926 - val_accuracy: 0.8872
-Epoch 34/1000
-60000/60000 - 5s - loss: 0.3546 - accuracy: 0.8962 - val_loss: 0.3888 - val_accuracy: 0.8876
-Epoch 35/1000
-60000/60000 - 5s - loss: 0.3503 - accuracy: 0.8978 - val_loss: 0.3873 - val_accuracy: 0.8880
-Epoch 36/1000
-60000/60000 - 5s - loss: 0.3475 - accuracy: 0.8984 - val_loss: 0.3845 - val_accuracy: 0.8887
-Epoch 37/1000
-60000/60000 - 5s - loss: 0.3439 - accuracy: 0.8998 - val_loss: 0.3818 - val_accuracy: 0.8896
-Epoch 38/1000
-60000/60000 - 5s - loss: 0.3404 - accuracy: 0.9010 - val_loss: 0.3803 - val_accuracy: 0.8899
-Epoch 39/1000
-60000/60000 - 5s - loss: 0.3377 - accuracy: 0.9016 - val_loss: 0.3770 - val_accuracy: 0.8906
-Epoch 40/1000
-60000/60000 - 5s - loss: 0.3344 - accuracy: 0.9026 - val_loss: 0.3743 - val_accuracy: 0.8910
-Epoch 41/1000
-60000/60000 - 5s - loss: 0.3318 - accuracy: 0.9031 - val_loss: 0.3722 - val_accuracy: 0.8937
-Epoch 42/1000
-60000/60000 - 5s - loss: 0.3290 - accuracy: 0.9044 - val_loss: 0.3710 - val_accuracy: 0.8942
-Epoch 43/1000
-60000/60000 - 5s - loss: 0.3266 - accuracy: 0.9051 - val_loss: 0.3687 - val_accuracy: 0.8935
-Epoch 44/1000
-60000/60000 - 5s - loss: 0.3240 - accuracy: 0.9060 - val_loss: 0.3691 - val_accuracy: 0.8945
-Epoch 45/1000
-60000/60000 - 5s - loss: 0.3217 - accuracy: 0.9066 - val_loss: 0.3652 - val_accuracy: 0.8949
-Epoch 46/1000
-60000/60000 - 5s - loss: 0.3193 - accuracy: 0.9075 - val_loss: 0.3645 - val_accuracy: 0.8950
-Epoch 47/1000
-60000/60000 - 5s - loss: 0.3171 - accuracy: 0.9081 - val_loss: 0.3622 - val_accuracy: 0.8958
-Epoch 48/1000
-60000/60000 - 5s - loss: 0.3146 - accuracy: 0.9090 - val_loss: 0.3601 - val_accuracy: 0.8962
-Epoch 49/1000
-60000/60000 - 5s - loss: 0.3125 - accuracy: 0.9098 - val_loss: 0.3578 - val_accuracy: 0.8967
-Epoch 50/1000
-60000/60000 - 5s - loss: 0.3098 - accuracy: 0.9110 - val_loss: 0.3576 - val_accuracy: 0.8980
-Epoch 51/1000
-60000/60000 - 5s - loss: 0.3076 - accuracy: 0.9113 - val_loss: 0.3563 - val_accuracy: 0.8980
-Epoch 52/1000
-60000/60000 - 5s - loss: 0.3059 - accuracy: 0.9120 - val_loss: 0.3542 - val_accuracy: 0.8997
-Epoch 53/1000
-60000/60000 - 5s - loss: 0.3036 - accuracy: 0.9131 - val_loss: 0.3514 - val_accuracy: 0.8985
-Epoch 54/1000
-60000/60000 - 5s - loss: 0.3010 - accuracy: 0.9136 - val_loss: 0.3511 - val_accuracy: 0.8987
-Epoch 55/1000
-60000/60000 - 5s - loss: 0.2994 - accuracy: 0.9138 - val_loss: 0.3483 - val_accuracy: 0.9006
-Epoch 56/1000
-60000/60000 - 5s - loss: 0.2974 - accuracy: 0.9149 - val_loss: 0.3471 - val_accuracy: 0.9009
-Epoch 57/1000
-60000/60000 - 5s - loss: 0.2952 - accuracy: 0.9152 - val_loss: 0.3463 - val_accuracy: 0.9006
-Epoch 58/1000
-60000/60000 - 5s - loss: 0.2937 - accuracy: 0.9158 - val_loss: 0.3437 - val_accuracy: 0.9024
-Epoch 59/1000
-60000/60000 - 5s - loss: 0.2918 - accuracy: 0.9164 - val_loss: 0.3428 - val_accuracy: 0.9013
-Epoch 60/1000
-60000/60000 - 5s - loss: 0.2901 - accuracy: 0.9169 - val_loss: 0.3405 - val_accuracy: 0.9018
-Epoch 61/1000
-60000/60000 - 5s - loss: 0.2881 - accuracy: 0.9176 - val_loss: 0.3394 - val_accuracy: 0.9023
-Epoch 62/1000
-60000/60000 - 5s - loss: 0.2863 - accuracy: 0.9178 - val_loss: 0.3392 - val_accuracy: 0.9022
-Epoch 63/1000
-60000/60000 - 5s - loss: 0.2852 - accuracy: 0.9182 - val_loss: 0.3372 - val_accuracy: 0.9032
-Epoch 64/1000
-60000/60000 - 5s - loss: 0.2832 - accuracy: 0.9190 - val_loss: 0.3357 - val_accuracy: 0.9036
-Epoch 65/1000
-60000/60000 - 5s - loss: 0.2814 - accuracy: 0.9190 - val_loss: 0.3333 - val_accuracy: 0.9051
-Epoch 66/1000
-60000/60000 - 5s - loss: 0.2797 - accuracy: 0.9195 - val_loss: 0.3324 - val_accuracy: 0.9037
-Epoch 67/1000
-60000/60000 - 5s - loss: 0.2781 - accuracy: 0.9198 - val_loss: 0.3322 - val_accuracy: 0.9049
-Epoch 68/1000
-60000/60000 - 5s - loss: 0.2768 - accuracy: 0.9203 - val_loss: 0.3293 - val_accuracy: 0.9064
-Epoch 69/1000
-60000/60000 - 5s - loss: 0.2749 - accuracy: 0.9209 - val_loss: 0.3291 - val_accuracy: 0.9068
-Epoch 70/1000
-60000/60000 - 5s - loss: 0.2736 - accuracy: 0.9220 - val_loss: 0.3267 - val_accuracy: 0.9076
-Epoch 71/1000
-60000/60000 - 5s - loss: 0.2717 - accuracy: 0.9213 - val_loss: 0.3258 - val_accuracy: 0.9070
-Epoch 72/1000
-60000/60000 - 5s - loss: 0.2704 - accuracy: 0.9217 - val_loss: 0.3261 - val_accuracy: 0.9068
-Epoch 73/1000
-60000/60000 - 5s - loss: 0.2692 - accuracy: 0.9220 - val_loss: 0.3239 - val_accuracy: 0.9081
-Epoch 74/1000
-60000/60000 - 5s - loss: 0.2678 - accuracy: 0.9227 - val_loss: 0.3220 - val_accuracy: 0.9083
-Epoch 75/1000
-60000/60000 - 5s - loss: 0.2664 - accuracy: 0.9228 - val_loss: 0.3214 - val_accuracy: 0.9086
-Epoch 76/1000
-60000/60000 - 5s - loss: 0.2647 - accuracy: 0.9235 - val_loss: 0.3202 - val_accuracy: 0.9091
-Epoch 77/1000
-60000/60000 - 5s - loss: 0.2636 - accuracy: 0.9235 - val_loss: 0.3205 - val_accuracy: 0.9081
-Epoch 78/1000
-60000/60000 - 5s - loss: 0.2621 - accuracy: 0.9241 - val_loss: 0.3187 - val_accuracy: 0.9082
-Epoch 79/1000
-60000/60000 - 5s - loss: 0.2606 - accuracy: 0.9247 - val_loss: 0.3176 - val_accuracy: 0.9097
-Epoch 80/1000
-60000/60000 - 5s - loss: 0.2597 - accuracy: 0.9249 - val_loss: 0.3169 - val_accuracy: 0.9091
-Epoch 81/1000
-60000/60000 - 5s - loss: 0.2582 - accuracy: 0.9254 - val_loss: 0.3161 - val_accuracy: 0.9092
-Epoch 82/1000
-60000/60000 - 5s - loss: 0.2572 - accuracy: 0.9255 - val_loss: 0.3160 - val_accuracy: 0.9097
-Epoch 83/1000
-60000/60000 - 5s - loss: 0.2558 - accuracy: 0.9258 - val_loss: 0.3154 - val_accuracy: 0.9098
-Epoch 84/1000
-60000/60000 - 5s - loss: 0.2550 - accuracy: 0.9264 - val_loss: 0.3142 - val_accuracy: 0.9097
-Epoch 85/1000
-60000/60000 - 5s - loss: 0.2534 - accuracy: 0.9270 - val_loss: 0.3141 - val_accuracy: 0.9100
-Epoch 86/1000
-60000/60000 - 5s - loss: 0.2522 - accuracy: 0.9269 - val_loss: 0.3127 - val_accuracy: 0.9104
-Epoch 87/1000
-60000/60000 - 5s - loss: 0.2514 - accuracy: 0.9274 - val_loss: 0.3120 - val_accuracy: 0.9103
-Epoch 88/1000
-60000/60000 - 5s - loss: 0.2501 - accuracy: 0.9281 - val_loss: 0.3118 - val_accuracy: 0.9107
-Epoch 89/1000
-60000/60000 - 5s - loss: 0.2490 - accuracy: 0.9284 - val_loss: 0.3107 - val_accuracy: 0.9108
-Epoch 90/1000
-60000/60000 - 5s - loss: 0.2477 - accuracy: 0.9284 - val_loss: 0.3103 - val_accuracy: 0.9112
-Epoch 91/1000
-60000/60000 - 5s - loss: 0.2468 - accuracy: 0.9290 - val_loss: 0.3101 - val_accuracy: 0.9117
-Epoch 92/1000
-60000/60000 - 5s - loss: 0.2459 - accuracy: 0.9295 - val_loss: 0.3089 - val_accuracy: 0.9124
-Epoch 93/1000
-60000/60000 - 5s - loss: 0.2445 - accuracy: 0.9299 - val_loss: 0.3065 - val_accuracy: 0.9132
-Epoch 94/1000
-60000/60000 - 5s - loss: 0.2436 - accuracy: 0.9301 - val_loss: 0.3068 - val_accuracy: 0.9120
-Epoch 95/1000
-60000/60000 - 5s - loss: 0.2425 - accuracy: 0.9304 - val_loss: 0.3071 - val_accuracy: 0.9122
-Epoch 96/1000
-60000/60000 - 5s - loss: 0.2416 - accuracy: 0.9309 - val_loss: 0.3060 - val_accuracy: 0.9129
-Epoch 97/1000
-60000/60000 - 5s - loss: 0.2407 - accuracy: 0.9313 - val_loss: 0.3061 - val_accuracy: 0.9131
-Epoch 98/1000
-60000/60000 - 5s - loss: 0.2397 - accuracy: 0.9314 - val_loss: 0.3054 - val_accuracy: 0.9127
-Epoch 99/1000
-60000/60000 - 5s - loss: 0.2387 - accuracy: 0.9323 - val_loss: 0.3040 - val_accuracy: 0.9134
-Epoch 100/1000
-60000/60000 - 5s - loss: 0.2377 - accuracy: 0.9326 - val_loss: 0.3049 - val_accuracy: 0.9131
-Epoch 101/1000
-60000/60000 - 5s - loss: 0.2369 - accuracy: 0.9328 - val_loss: 0.3041 - val_accuracy: 0.9145
-Epoch 102/1000
-60000/60000 - 5s - loss: 0.2359 - accuracy: 0.9332 - val_loss: 0.3037 - val_accuracy: 0.9142
-Epoch 103/1000
-60000/60000 - 5s - loss: 0.2350 - accuracy: 0.9335 - val_loss: 0.3030 - val_accuracy: 0.9151
-Epoch 104/1000
-60000/60000 - 5s - loss: 0.2339 - accuracy: 0.9338 - val_loss: 0.3027 - val_accuracy: 0.9149
-Epoch 105/1000
-60000/60000 - 5s - loss: 0.2331 - accuracy: 0.9340 - val_loss: 0.3025 - val_accuracy: 0.9150
-Epoch 106/1000
-60000/60000 - 5s - loss: 0.2321 - accuracy: 0.9343 - val_loss: 0.3025 - val_accuracy: 0.9147
-Epoch 107/1000
-60000/60000 - 5s - loss: 0.2312 - accuracy: 0.9346 - val_loss: 0.3023 - val_accuracy: 0.9154
-Epoch 108/1000
-60000/60000 - 5s - loss: 0.2303 - accuracy: 0.9347 - val_loss: 0.3016 - val_accuracy: 0.9161
-Epoch 109/1000
-60000/60000 - 5s - loss: 0.2295 - accuracy: 0.9352 - val_loss: 0.3012 - val_accuracy: 0.9150
-Epoch 110/1000
-60000/60000 - 5s - loss: 0.2288 - accuracy: 0.9355 - val_loss: 0.3021 - val_accuracy: 0.9147
-Epoch 111/1000
-60000/60000 - 5s - loss: 0.2282 - accuracy: 0.9354 - val_loss: 0.3007 - val_accuracy: 0.9156
-Epoch 112/1000
-60000/60000 - 5s - loss: 0.2271 - accuracy: 0.9359 - val_loss: 0.3009 - val_accuracy: 0.9162
-Epoch 113/1000
-60000/60000 - 5s - loss: 0.2264 - accuracy: 0.9360 - val_loss: 0.3003 - val_accuracy: 0.9164
-Epoch 114/1000
-60000/60000 - 5s - loss: 0.2257 - accuracy: 0.9361 - val_loss: 0.2998 - val_accuracy: 0.9174
-Epoch 115/1000
-60000/60000 - 5s - loss: 0.2249 - accuracy: 0.9363 - val_loss: 0.2983 - val_accuracy: 0.9168
-Epoch 116/1000
-60000/60000 - 5s - loss: 0.2242 - accuracy: 0.9366 - val_loss: 0.2995 - val_accuracy: 0.9159
-Epoch 117/1000
-60000/60000 - 5s - loss: 0.2232 - accuracy: 0.9371 - val_loss: 0.2988 - val_accuracy: 0.9162
-Epoch 118/1000
-60000/60000 - 5s - loss: 0.2226 - accuracy: 0.9376 - val_loss: 0.2988 - val_accuracy: 0.9163
-Epoch 119/1000
-60000/60000 - 5s - loss: 0.2214 - accuracy: 0.9374 - val_loss: 0.2988 - val_accuracy: 0.9163
-Epoch 120/1000
-60000/60000 - 5s - loss: 0.2211 - accuracy: 0.9377 - val_loss: 0.2990 - val_accuracy: 0.9167
-Epoch 121/1000
-60000/60000 - 5s - loss: 0.2202 - accuracy: 0.9377 - val_loss: 0.2982 - val_accuracy: 0.9161
-Epoch 122/1000
-60000/60000 - 5s - loss: 0.2197 - accuracy: 0.9384 - val_loss: 0.2977 - val_accuracy: 0.9170
-Epoch 123/1000
-60000/60000 - 5s - loss: 0.2188 - accuracy: 0.9385 - val_loss: 0.2973 - val_accuracy: 0.9169
-Epoch 124/1000
-60000/60000 - 5s - loss: 0.2180 - accuracy: 0.9387 - val_loss: 0.2969 - val_accuracy: 0.9171
-Epoch 125/1000
-60000/60000 - 5s - loss: 0.2176 - accuracy: 0.9391 - val_loss: 0.2967 - val_accuracy: 0.9168
-Epoch 126/1000
-60000/60000 - 5s - loss: 0.2166 - accuracy: 0.9396 - val_loss: 0.2961 - val_accuracy: 0.9168
-Epoch 127/1000
-60000/60000 - 5s - loss: 0.2161 - accuracy: 0.9397 - val_loss: 0.2962 - val_accuracy: 0.9167
-Epoch 128/1000
-60000/60000 - 5s - loss: 0.2153 - accuracy: 0.9397 - val_loss: 0.2955 - val_accuracy: 0.9171
-Epoch 129/1000
-60000/60000 - 5s - loss: 0.2146 - accuracy: 0.9401 - val_loss: 0.2955 - val_accuracy: 0.9175
-Epoch 130/1000
-60000/60000 - 5s - loss: 0.2140 - accuracy: 0.9403 - val_loss: 0.2946 - val_accuracy: 0.9164
-Epoch 131/1000
-60000/60000 - 5s - loss: 0.2132 - accuracy: 0.9404 - val_loss: 0.2950 - val_accuracy: 0.9175
-Epoch 132/1000
-60000/60000 - 5s - loss: 0.2126 - accuracy: 0.9410 - val_loss: 0.2950 - val_accuracy: 0.9172
-Epoch 133/1000
-60000/60000 - 5s - loss: 0.2120 - accuracy: 0.9410 - val_loss: 0.2956 - val_accuracy: 0.9180
-Epoch 134/1000
-60000/60000 - 5s - loss: 0.2113 - accuracy: 0.9415 - val_loss: 0.2950 - val_accuracy: 0.9180
-Epoch 135/1000
-60000/60000 - 5s - loss: 0.2106 - accuracy: 0.9417 - val_loss: 0.2955 - val_accuracy: 0.9169
-Epoch 136/1000
-60000/60000 - 5s - loss: 0.2100 - accuracy: 0.9420 - val_loss: 0.2957 - val_accuracy: 0.9181
-Epoch 137/1000
-60000/60000 - 5s - loss: 0.2093 - accuracy: 0.9425 - val_loss: 0.2948 - val_accuracy: 0.9189
-Epoch 138/1000
-60000/60000 - 5s - loss: 0.2086 - accuracy: 0.9422 - val_loss: 0.2946 - val_accuracy: 0.9174
[... epochs 139-265 snipped: one "Epoch N/1000" header plus one metrics line each, ~5 s per epoch; train loss falls steadily from 0.2080 to 0.1604 and train accuracy rises from 0.9425 to 0.9564, while val_loss stays flat around 0.294-0.296 and val_accuracy creeps from 0.917 to 0.922 ...]
-Epoch 266/1000
-60000/60000 - 5s - loss: 0.1604 - accuracy: 0.9563 - val_loss: 0.2929 - val_accuracy: 0.9230
[... epochs 267-720 snipped: val_loss reaches its minimum of 0.2929 here, then drifts upward to ~0.338 while train loss keeps falling toward 0.114 and train accuracy reaches 0.972; val_accuracy plateaus near 0.920, so the training and validation curves diverge from this point on ...]
-Epoch 721/1000
-60000/60000 - 5s - loss: 0.1140 - accuracy: 0.9717 - val_loss: 0.3380 - val_accuracy: 0.9195
-Epoch 722/1000
-60000/60000 - 5s - loss: 0.1138 - accuracy: 0.9716 - val_loss: 0.3379 - val_accuracy: 0.9208
-Epoch 723/1000
-60000/60000 - 5s - loss: 0.1139 - accuracy: 0.9720 - val_loss: 0.3378 - val_accuracy: 0.9207
-Epoch 724/1000
-60000/60000 - 5s - loss: 0.1139 - accuracy: 0.9715 - val_loss: 0.3375 - val_accuracy: 0.9201
-Epoch 725/1000
-60000/60000 - 5s - loss: 0.1138 - accuracy: 0.9712 - val_loss: 0.3384 - val_accuracy: 0.9198
-Epoch 726/1000
-60000/60000 - 5s - loss: 0.1137 - accuracy: 0.9718 - val_loss: 0.3382 - val_accuracy: 0.9195
-Epoch 727/1000
-60000/60000 - 5s - loss: 0.1137 - accuracy: 0.9715 - val_loss: 0.3386 - val_accuracy: 0.9197
-Epoch 728/1000
-60000/60000 - 5s - loss: 0.1136 - accuracy: 0.9714 - val_loss: 0.3380 - val_accuracy: 0.9193
-Epoch 729/1000
-60000/60000 - 5s - loss: 0.1136 - accuracy: 0.9717 - val_loss: 0.3376 - val_accuracy: 0.9197
-Epoch 730/1000
-60000/60000 - 5s - loss: 0.1136 - accuracy: 0.9717 - val_loss: 0.3383 - val_accuracy: 0.9211
-Epoch 731/1000
-60000/60000 - 5s - loss: 0.1135 - accuracy: 0.9720 - val_loss: 0.3381 - val_accuracy: 0.9209
-Epoch 732/1000
-60000/60000 - 5s - loss: 0.1134 - accuracy: 0.9715 - val_loss: 0.3390 - val_accuracy: 0.9206
-Epoch 733/1000
-60000/60000 - 5s - loss: 0.1133 - accuracy: 0.9718 - val_loss: 0.3388 - val_accuracy: 0.9204
-Epoch 734/1000
-60000/60000 - 5s - loss: 0.1133 - accuracy: 0.9719 - val_loss: 0.3388 - val_accuracy: 0.9194
-Epoch 735/1000
-60000/60000 - 5s - loss: 0.1133 - accuracy: 0.9717 - val_loss: 0.3381 - val_accuracy: 0.9192
-Epoch 736/1000
-60000/60000 - 5s - loss: 0.1133 - accuracy: 0.9721 - val_loss: 0.3388 - val_accuracy: 0.9204
-Epoch 737/1000
-60000/60000 - 5s - loss: 0.1132 - accuracy: 0.9718 - val_loss: 0.3391 - val_accuracy: 0.9208
-Epoch 738/1000
-60000/60000 - 5s - loss: 0.1132 - accuracy: 0.9722 - val_loss: 0.3388 - val_accuracy: 0.9197
-Epoch 739/1000
-60000/60000 - 5s - loss: 0.1131 - accuracy: 0.9716 - val_loss: 0.3392 - val_accuracy: 0.9193
-Epoch 740/1000
-60000/60000 - 5s - loss: 0.1131 - accuracy: 0.9718 - val_loss: 0.3395 - val_accuracy: 0.9208
-Epoch 741/1000
-60000/60000 - 5s - loss: 0.1130 - accuracy: 0.9717 - val_loss: 0.3390 - val_accuracy: 0.9210
-Epoch 742/1000
-60000/60000 - 5s - loss: 0.1130 - accuracy: 0.9720 - val_loss: 0.3394 - val_accuracy: 0.9194
-Epoch 743/1000
-60000/60000 - 5s - loss: 0.1129 - accuracy: 0.9720 - val_loss: 0.3398 - val_accuracy: 0.9207
-Epoch 744/1000
-60000/60000 - 5s - loss: 0.1129 - accuracy: 0.9719 - val_loss: 0.3393 - val_accuracy: 0.9199
-Epoch 745/1000
-60000/60000 - 5s - loss: 0.1128 - accuracy: 0.9719 - val_loss: 0.3387 - val_accuracy: 0.9197
-Epoch 746/1000
-60000/60000 - 5s - loss: 0.1128 - accuracy: 0.9721 - val_loss: 0.3392 - val_accuracy: 0.9192
-Epoch 747/1000
-60000/60000 - 5s - loss: 0.1128 - accuracy: 0.9718 - val_loss: 0.3398 - val_accuracy: 0.9200
-Epoch 748/1000
-60000/60000 - 5s - loss: 0.1128 - accuracy: 0.9720 - val_loss: 0.3400 - val_accuracy: 0.9196
-Epoch 749/1000
-60000/60000 - 5s - loss: 0.1127 - accuracy: 0.9719 - val_loss: 0.3395 - val_accuracy: 0.9209
-Epoch 750/1000
-60000/60000 - 5s - loss: 0.1127 - accuracy: 0.9722 - val_loss: 0.3397 - val_accuracy: 0.9201
-Epoch 751/1000
-60000/60000 - 5s - loss: 0.1127 - accuracy: 0.9721 - val_loss: 0.3397 - val_accuracy: 0.9197
-Epoch 752/1000
-60000/60000 - 5s - loss: 0.1126 - accuracy: 0.9720 - val_loss: 0.3401 - val_accuracy: 0.9200
-Epoch 753/1000
-60000/60000 - 5s - loss: 0.1125 - accuracy: 0.9720 - val_loss: 0.3399 - val_accuracy: 0.9196
-Epoch 754/1000
-60000/60000 - 5s - loss: 0.1125 - accuracy: 0.9721 - val_loss: 0.3407 - val_accuracy: 0.9204
-Epoch 755/1000
-60000/60000 - 5s - loss: 0.1124 - accuracy: 0.9721 - val_loss: 0.3408 - val_accuracy: 0.9202
-Epoch 756/1000
-60000/60000 - 5s - loss: 0.1124 - accuracy: 0.9720 - val_loss: 0.3397 - val_accuracy: 0.9199
-Epoch 757/1000
-60000/60000 - 5s - loss: 0.1124 - accuracy: 0.9723 - val_loss: 0.3405 - val_accuracy: 0.9199
-Epoch 758/1000
-60000/60000 - 5s - loss: 0.1123 - accuracy: 0.9718 - val_loss: 0.3406 - val_accuracy: 0.9200
-Epoch 759/1000
-60000/60000 - 5s - loss: 0.1122 - accuracy: 0.9721 - val_loss: 0.3407 - val_accuracy: 0.9192
-Epoch 760/1000
-60000/60000 - 5s - loss: 0.1122 - accuracy: 0.9721 - val_loss: 0.3401 - val_accuracy: 0.9202
-Epoch 761/1000
-60000/60000 - 5s - loss: 0.1122 - accuracy: 0.9717 - val_loss: 0.3410 - val_accuracy: 0.9186
-Epoch 762/1000
-60000/60000 - 5s - loss: 0.1122 - accuracy: 0.9718 - val_loss: 0.3411 - val_accuracy: 0.9204
-Epoch 763/1000
-60000/60000 - 5s - loss: 0.1121 - accuracy: 0.9722 - val_loss: 0.3419 - val_accuracy: 0.9187
-Epoch 764/1000
-60000/60000 - 5s - loss: 0.1121 - accuracy: 0.9722 - val_loss: 0.3408 - val_accuracy: 0.9202
-Epoch 765/1000
-60000/60000 - 5s - loss: 0.1121 - accuracy: 0.9725 - val_loss: 0.3415 - val_accuracy: 0.9195
-Epoch 766/1000
-60000/60000 - 5s - loss: 0.1120 - accuracy: 0.9721 - val_loss: 0.3417 - val_accuracy: 0.9201
-Epoch 767/1000
-60000/60000 - 5s - loss: 0.1120 - accuracy: 0.9718 - val_loss: 0.3413 - val_accuracy: 0.9193
-Epoch 768/1000
-60000/60000 - 5s - loss: 0.1119 - accuracy: 0.9725 - val_loss: 0.3415 - val_accuracy: 0.9193
-Epoch 769/1000
-60000/60000 - 5s - loss: 0.1119 - accuracy: 0.9720 - val_loss: 0.3407 - val_accuracy: 0.9200
-Epoch 770/1000
-60000/60000 - 5s - loss: 0.1118 - accuracy: 0.9718 - val_loss: 0.3415 - val_accuracy: 0.9195
-Epoch 771/1000
-60000/60000 - 5s - loss: 0.1118 - accuracy: 0.9724 - val_loss: 0.3415 - val_accuracy: 0.9198
-Epoch 772/1000
-60000/60000 - 5s - loss: 0.1117 - accuracy: 0.9721 - val_loss: 0.3415 - val_accuracy: 0.9191
-Epoch 773/1000
-60000/60000 - 5s - loss: 0.1117 - accuracy: 0.9722 - val_loss: 0.3421 - val_accuracy: 0.9198
-Epoch 774/1000
-60000/60000 - 5s - loss: 0.1117 - accuracy: 0.9724 - val_loss: 0.3415 - val_accuracy: 0.9203
-Epoch 775/1000
-60000/60000 - 5s - loss: 0.1117 - accuracy: 0.9722 - val_loss: 0.3416 - val_accuracy: 0.9203
-Epoch 776/1000
-60000/60000 - 5s - loss: 0.1116 - accuracy: 0.9721 - val_loss: 0.3417 - val_accuracy: 0.9202
-Epoch 777/1000
-60000/60000 - 5s - loss: 0.1115 - accuracy: 0.9723 - val_loss: 0.3422 - val_accuracy: 0.9188
-Epoch 778/1000
-60000/60000 - 5s - loss: 0.1114 - accuracy: 0.9724 - val_loss: 0.3419 - val_accuracy: 0.9192
-Epoch 779/1000
-60000/60000 - 5s - loss: 0.1115 - accuracy: 0.9723 - val_loss: 0.3418 - val_accuracy: 0.9193
-Epoch 780/1000
-60000/60000 - 5s - loss: 0.1114 - accuracy: 0.9722 - val_loss: 0.3422 - val_accuracy: 0.9188
-Epoch 781/1000
-60000/60000 - 5s - loss: 0.1114 - accuracy: 0.9724 - val_loss: 0.3422 - val_accuracy: 0.9198
-Epoch 782/1000
-60000/60000 - 5s - loss: 0.1114 - accuracy: 0.9725 - val_loss: 0.3416 - val_accuracy: 0.9208
-Epoch 783/1000
-60000/60000 - 5s - loss: 0.1114 - accuracy: 0.9724 - val_loss: 0.3427 - val_accuracy: 0.9201
-Epoch 784/1000
-60000/60000 - 5s - loss: 0.1113 - accuracy: 0.9722 - val_loss: 0.3415 - val_accuracy: 0.9196
-Epoch 785/1000
-60000/60000 - 5s - loss: 0.1113 - accuracy: 0.9724 - val_loss: 0.3419 - val_accuracy: 0.9199
-Epoch 786/1000
-60000/60000 - 5s - loss: 0.1112 - accuracy: 0.9724 - val_loss: 0.3422 - val_accuracy: 0.9192
-Epoch 787/1000
-60000/60000 - 5s - loss: 0.1112 - accuracy: 0.9723 - val_loss: 0.3423 - val_accuracy: 0.9196
-Epoch 788/1000
-60000/60000 - 5s - loss: 0.1112 - accuracy: 0.9722 - val_loss: 0.3426 - val_accuracy: 0.9206
-Epoch 789/1000
-60000/60000 - 5s - loss: 0.1111 - accuracy: 0.9725 - val_loss: 0.3421 - val_accuracy: 0.9197
-Epoch 790/1000
-60000/60000 - 5s - loss: 0.1111 - accuracy: 0.9725 - val_loss: 0.3426 - val_accuracy: 0.9204
-Epoch 791/1000
-60000/60000 - 5s - loss: 0.1110 - accuracy: 0.9724 - val_loss: 0.3422 - val_accuracy: 0.9200
-Epoch 792/1000
-60000/60000 - 5s - loss: 0.1109 - accuracy: 0.9726 - val_loss: 0.3428 - val_accuracy: 0.9200
-Epoch 793/1000
-60000/60000 - 5s - loss: 0.1109 - accuracy: 0.9723 - val_loss: 0.3423 - val_accuracy: 0.9206
-Epoch 794/1000
-60000/60000 - 5s - loss: 0.1110 - accuracy: 0.9725 - val_loss: 0.3429 - val_accuracy: 0.9191
-Epoch 795/1000
-60000/60000 - 5s - loss: 0.1109 - accuracy: 0.9726 - val_loss: 0.3426 - val_accuracy: 0.9210
-Epoch 796/1000
-60000/60000 - 5s - loss: 0.1108 - accuracy: 0.9727 - val_loss: 0.3429 - val_accuracy: 0.9198
-Epoch 797/1000
-60000/60000 - 5s - loss: 0.1108 - accuracy: 0.9726 - val_loss: 0.3427 - val_accuracy: 0.9190
-Epoch 798/1000
-60000/60000 - 5s - loss: 0.1107 - accuracy: 0.9725 - val_loss: 0.3432 - val_accuracy: 0.9197
-Epoch 799/1000
-60000/60000 - 5s - loss: 0.1107 - accuracy: 0.9727 - val_loss: 0.3426 - val_accuracy: 0.9198
-Epoch 800/1000
-60000/60000 - 5s - loss: 0.1107 - accuracy: 0.9729 - val_loss: 0.3431 - val_accuracy: 0.9189
-Epoch 801/1000
-60000/60000 - 5s - loss: 0.1107 - accuracy: 0.9727 - val_loss: 0.3432 - val_accuracy: 0.9199
-Epoch 802/1000
-60000/60000 - 5s - loss: 0.1105 - accuracy: 0.9725 - val_loss: 0.3434 - val_accuracy: 0.9202
-Epoch 803/1000
-60000/60000 - 5s - loss: 0.1106 - accuracy: 0.9727 - val_loss: 0.3431 - val_accuracy: 0.9197
-Epoch 804/1000
-60000/60000 - 5s - loss: 0.1105 - accuracy: 0.9728 - val_loss: 0.3434 - val_accuracy: 0.9192
-Epoch 805/1000
-60000/60000 - 5s - loss: 0.1104 - accuracy: 0.9728 - val_loss: 0.3438 - val_accuracy: 0.9205
-Epoch 806/1000
-60000/60000 - 5s - loss: 0.1105 - accuracy: 0.9726 - val_loss: 0.3438 - val_accuracy: 0.9196
-Epoch 807/1000
-60000/60000 - 5s - loss: 0.1104 - accuracy: 0.9724 - val_loss: 0.3439 - val_accuracy: 0.9197
-Epoch 808/1000
-60000/60000 - 5s - loss: 0.1104 - accuracy: 0.9729 - val_loss: 0.3438 - val_accuracy: 0.9198
-Epoch 809/1000
-60000/60000 - 5s - loss: 0.1103 - accuracy: 0.9726 - val_loss: 0.3436 - val_accuracy: 0.9196
-Epoch 810/1000
-60000/60000 - 5s - loss: 0.1103 - accuracy: 0.9726 - val_loss: 0.3439 - val_accuracy: 0.9208
-Epoch 811/1000
-60000/60000 - 5s - loss: 0.1103 - accuracy: 0.9730 - val_loss: 0.3439 - val_accuracy: 0.9213
-Epoch 812/1000
-60000/60000 - 5s - loss: 0.1103 - accuracy: 0.9728 - val_loss: 0.3443 - val_accuracy: 0.9204
-Epoch 813/1000
-60000/60000 - 5s - loss: 0.1102 - accuracy: 0.9729 - val_loss: 0.3439 - val_accuracy: 0.9206
-Epoch 814/1000
-60000/60000 - 5s - loss: 0.1101 - accuracy: 0.9730 - val_loss: 0.3443 - val_accuracy: 0.9202
-Epoch 815/1000
-60000/60000 - 5s - loss: 0.1102 - accuracy: 0.9726 - val_loss: 0.3439 - val_accuracy: 0.9205
-Epoch 816/1000
-60000/60000 - 5s - loss: 0.1101 - accuracy: 0.9730 - val_loss: 0.3437 - val_accuracy: 0.9202
-Epoch 817/1000
-60000/60000 - 5s - loss: 0.1101 - accuracy: 0.9729 - val_loss: 0.3445 - val_accuracy: 0.9193
-Epoch 818/1000
-60000/60000 - 5s - loss: 0.1101 - accuracy: 0.9728 - val_loss: 0.3443 - val_accuracy: 0.9199
-Epoch 819/1000
-60000/60000 - 5s - loss: 0.1100 - accuracy: 0.9728 - val_loss: 0.3441 - val_accuracy: 0.9196
-Epoch 820/1000
-60000/60000 - 5s - loss: 0.1100 - accuracy: 0.9728 - val_loss: 0.3440 - val_accuracy: 0.9196
-Epoch 821/1000
-60000/60000 - 5s - loss: 0.1100 - accuracy: 0.9729 - val_loss: 0.3446 - val_accuracy: 0.9197
-Epoch 822/1000
-60000/60000 - 5s - loss: 0.1098 - accuracy: 0.9731 - val_loss: 0.3450 - val_accuracy: 0.9204
-Epoch 823/1000
-60000/60000 - 5s - loss: 0.1098 - accuracy: 0.9729 - val_loss: 0.3444 - val_accuracy: 0.9195
-Epoch 824/1000
-60000/60000 - 5s - loss: 0.1098 - accuracy: 0.9729 - val_loss: 0.3449 - val_accuracy: 0.9195
-Epoch 825/1000
-60000/60000 - 5s - loss: 0.1097 - accuracy: 0.9729 - val_loss: 0.3446 - val_accuracy: 0.9208
-Epoch 826/1000
-60000/60000 - 5s - loss: 0.1098 - accuracy: 0.9730 - val_loss: 0.3447 - val_accuracy: 0.9207
-Epoch 827/1000
-60000/60000 - 5s - loss: 0.1098 - accuracy: 0.9729 - val_loss: 0.3444 - val_accuracy: 0.9205
-Epoch 828/1000
-60000/60000 - 5s - loss: 0.1097 - accuracy: 0.9733 - val_loss: 0.3442 - val_accuracy: 0.9204
-Epoch 829/1000
-60000/60000 - 5s - loss: 0.1097 - accuracy: 0.9728 - val_loss: 0.3450 - val_accuracy: 0.9197
-Epoch 830/1000
-60000/60000 - 5s - loss: 0.1097 - accuracy: 0.9730 - val_loss: 0.3446 - val_accuracy: 0.9200
-Epoch 831/1000
-60000/60000 - 5s - loss: 0.1096 - accuracy: 0.9732 - val_loss: 0.3451 - val_accuracy: 0.9196
-Epoch 832/1000
-60000/60000 - 5s - loss: 0.1095 - accuracy: 0.9730 - val_loss: 0.3450 - val_accuracy: 0.9202
-Epoch 833/1000
-60000/60000 - 5s - loss: 0.1096 - accuracy: 0.9731 - val_loss: 0.3447 - val_accuracy: 0.9204
-Epoch 834/1000
-60000/60000 - 5s - loss: 0.1095 - accuracy: 0.9730 - val_loss: 0.3446 - val_accuracy: 0.9199
-Epoch 835/1000
-60000/60000 - 5s - loss: 0.1094 - accuracy: 0.9731 - val_loss: 0.3452 - val_accuracy: 0.9197
-Epoch 836/1000
-60000/60000 - 5s - loss: 0.1095 - accuracy: 0.9731 - val_loss: 0.3454 - val_accuracy: 0.9196
-Epoch 837/1000
-60000/60000 - 5s - loss: 0.1094 - accuracy: 0.9733 - val_loss: 0.3455 - val_accuracy: 0.9201
-Epoch 838/1000
-60000/60000 - 5s - loss: 0.1093 - accuracy: 0.9731 - val_loss: 0.3454 - val_accuracy: 0.9204
-Epoch 839/1000
-60000/60000 - 5s - loss: 0.1093 - accuracy: 0.9733 - val_loss: 0.3454 - val_accuracy: 0.9201
-Epoch 840/1000
-60000/60000 - 5s - loss: 0.1093 - accuracy: 0.9730 - val_loss: 0.3448 - val_accuracy: 0.9198
-Epoch 841/1000
-60000/60000 - 5s - loss: 0.1092 - accuracy: 0.9731 - val_loss: 0.3452 - val_accuracy: 0.9210
-Epoch 842/1000
-60000/60000 - 5s - loss: 0.1091 - accuracy: 0.9732 - val_loss: 0.3455 - val_accuracy: 0.9198
-Epoch 843/1000
-60000/60000 - 5s - loss: 0.1092 - accuracy: 0.9731 - val_loss: 0.3458 - val_accuracy: 0.9195
-Epoch 844/1000
-60000/60000 - 5s - loss: 0.1092 - accuracy: 0.9732 - val_loss: 0.3453 - val_accuracy: 0.9198
-Epoch 845/1000
-60000/60000 - 5s - loss: 0.1091 - accuracy: 0.9730 - val_loss: 0.3456 - val_accuracy: 0.9201
-Epoch 846/1000
-60000/60000 - 5s - loss: 0.1091 - accuracy: 0.9735 - val_loss: 0.3462 - val_accuracy: 0.9194
-Epoch 847/1000
-60000/60000 - 5s - loss: 0.1091 - accuracy: 0.9732 - val_loss: 0.3457 - val_accuracy: 0.9196
-Epoch 848/1000
-60000/60000 - 5s - loss: 0.1090 - accuracy: 0.9731 - val_loss: 0.3461 - val_accuracy: 0.9200
-Epoch 849/1000
-60000/60000 - 5s - loss: 0.1089 - accuracy: 0.9732 - val_loss: 0.3457 - val_accuracy: 0.9203
-Epoch 850/1000
-60000/60000 - 5s - loss: 0.1089 - accuracy: 0.9734 - val_loss: 0.3462 - val_accuracy: 0.9201
-Epoch 851/1000
-60000/60000 - 5s - loss: 0.1089 - accuracy: 0.9734 - val_loss: 0.3470 - val_accuracy: 0.9204
-Epoch 852/1000
-60000/60000 - 5s - loss: 0.1089 - accuracy: 0.9735 - val_loss: 0.3464 - val_accuracy: 0.9197
-Epoch 853/1000
-60000/60000 - 5s - loss: 0.1088 - accuracy: 0.9732 - val_loss: 0.3455 - val_accuracy: 0.9198
-Epoch 854/1000
-60000/60000 - 5s - loss: 0.1088 - accuracy: 0.9735 - val_loss: 0.3462 - val_accuracy: 0.9204
-Epoch 855/1000
-60000/60000 - 5s - loss: 0.1088 - accuracy: 0.9736 - val_loss: 0.3464 - val_accuracy: 0.9209
-Epoch 856/1000
-60000/60000 - 5s - loss: 0.1087 - accuracy: 0.9734 - val_loss: 0.3467 - val_accuracy: 0.9205
-Epoch 857/1000
-60000/60000 - 5s - loss: 0.1087 - accuracy: 0.9734 - val_loss: 0.3471 - val_accuracy: 0.9207
-Epoch 858/1000
-60000/60000 - 5s - loss: 0.1087 - accuracy: 0.9735 - val_loss: 0.3467 - val_accuracy: 0.9199
-Epoch 859/1000
-60000/60000 - 5s - loss: 0.1087 - accuracy: 0.9733 - val_loss: 0.3469 - val_accuracy: 0.9202
-Epoch 860/1000
-60000/60000 - 5s - loss: 0.1085 - accuracy: 0.9735 - val_loss: 0.3472 - val_accuracy: 0.9205
-Epoch 861/1000
-60000/60000 - 5s - loss: 0.1086 - accuracy: 0.9733 - val_loss: 0.3473 - val_accuracy: 0.9192
-Epoch 862/1000
-60000/60000 - 5s - loss: 0.1085 - accuracy: 0.9736 - val_loss: 0.3468 - val_accuracy: 0.9197
-Epoch 863/1000
-60000/60000 - 5s - loss: 0.1085 - accuracy: 0.9736 - val_loss: 0.3472 - val_accuracy: 0.9199
-Epoch 864/1000
-60000/60000 - 5s - loss: 0.1084 - accuracy: 0.9736 - val_loss: 0.3475 - val_accuracy: 0.9196
-Epoch 865/1000
-60000/60000 - 5s - loss: 0.1084 - accuracy: 0.9736 - val_loss: 0.3472 - val_accuracy: 0.9206
-Epoch 866/1000
-60000/60000 - 5s - loss: 0.1084 - accuracy: 0.9735 - val_loss: 0.3472 - val_accuracy: 0.9200
-Epoch 867/1000
-60000/60000 - 5s - loss: 0.1084 - accuracy: 0.9735 - val_loss: 0.3475 - val_accuracy: 0.9201
-Epoch 868/1000
-60000/60000 - 5s - loss: 0.1083 - accuracy: 0.9736 - val_loss: 0.3475 - val_accuracy: 0.9202
-Epoch 869/1000
-60000/60000 - 5s - loss: 0.1084 - accuracy: 0.9731 - val_loss: 0.3479 - val_accuracy: 0.9205
-Epoch 870/1000
-60000/60000 - 5s - loss: 0.1083 - accuracy: 0.9737 - val_loss: 0.3483 - val_accuracy: 0.9194
-Epoch 871/1000
-60000/60000 - 5s - loss: 0.1083 - accuracy: 0.9733 - val_loss: 0.3475 - val_accuracy: 0.9198
-Epoch 872/1000
-60000/60000 - 5s - loss: 0.1083 - accuracy: 0.9735 - val_loss: 0.3480 - val_accuracy: 0.9197
-Epoch 873/1000
-60000/60000 - 5s - loss: 0.1082 - accuracy: 0.9737 - val_loss: 0.3478 - val_accuracy: 0.9198
-Epoch 874/1000
-60000/60000 - 5s - loss: 0.1082 - accuracy: 0.9735 - val_loss: 0.3484 - val_accuracy: 0.9204
-Epoch 875/1000
-60000/60000 - 5s - loss: 0.1081 - accuracy: 0.9734 - val_loss: 0.3479 - val_accuracy: 0.9210
-Epoch 876/1000
-60000/60000 - 5s - loss: 0.1082 - accuracy: 0.9736 - val_loss: 0.3477 - val_accuracy: 0.9198
-Epoch 877/1000
-60000/60000 - 5s - loss: 0.1081 - accuracy: 0.9737 - val_loss: 0.3478 - val_accuracy: 0.9201
-Epoch 878/1000
-60000/60000 - 5s - loss: 0.1080 - accuracy: 0.9733 - val_loss: 0.3483 - val_accuracy: 0.9202
-Epoch 879/1000
-60000/60000 - 5s - loss: 0.1080 - accuracy: 0.9735 - val_loss: 0.3484 - val_accuracy: 0.9189
-Epoch 880/1000
-60000/60000 - 5s - loss: 0.1080 - accuracy: 0.9734 - val_loss: 0.3483 - val_accuracy: 0.9196
-Epoch 881/1000
-60000/60000 - 5s - loss: 0.1079 - accuracy: 0.9737 - val_loss: 0.3483 - val_accuracy: 0.9190
-Epoch 882/1000
-60000/60000 - 5s - loss: 0.1079 - accuracy: 0.9733 - val_loss: 0.3484 - val_accuracy: 0.9192
-Epoch 883/1000
-60000/60000 - 5s - loss: 0.1080 - accuracy: 0.9738 - val_loss: 0.3485 - val_accuracy: 0.9195
-Epoch 884/1000
-60000/60000 - 5s - loss: 0.1079 - accuracy: 0.9737 - val_loss: 0.3488 - val_accuracy: 0.9195
-Epoch 885/1000
-60000/60000 - 5s - loss: 0.1079 - accuracy: 0.9738 - val_loss: 0.3492 - val_accuracy: 0.9203
-Epoch 886/1000
-60000/60000 - 5s - loss: 0.1078 - accuracy: 0.9736 - val_loss: 0.3489 - val_accuracy: 0.9202
-Epoch 887/1000
-60000/60000 - 5s - loss: 0.1078 - accuracy: 0.9737 - val_loss: 0.3486 - val_accuracy: 0.9197
-Epoch 888/1000
-60000/60000 - 5s - loss: 0.1077 - accuracy: 0.9739 - val_loss: 0.3489 - val_accuracy: 0.9193
-Epoch 889/1000
-60000/60000 - 5s - loss: 0.1077 - accuracy: 0.9738 - val_loss: 0.3490 - val_accuracy: 0.9199
-Epoch 890/1000
-60000/60000 - 5s - loss: 0.1078 - accuracy: 0.9736 - val_loss: 0.3488 - val_accuracy: 0.9194
-Epoch 891/1000
-60000/60000 - 5s - loss: 0.1077 - accuracy: 0.9736 - val_loss: 0.3488 - val_accuracy: 0.9196
-Epoch 892/1000
-60000/60000 - 5s - loss: 0.1077 - accuracy: 0.9737 - val_loss: 0.3490 - val_accuracy: 0.9200
-Epoch 893/1000
-60000/60000 - 5s - loss: 0.1076 - accuracy: 0.9738 - val_loss: 0.3489 - val_accuracy: 0.9193
-Epoch 894/1000
-60000/60000 - 5s - loss: 0.1076 - accuracy: 0.9738 - val_loss: 0.3492 - val_accuracy: 0.9193
-Epoch 895/1000
-60000/60000 - 5s - loss: 0.1075 - accuracy: 0.9736 - val_loss: 0.3494 - val_accuracy: 0.9199
-Epoch 896/1000
-60000/60000 - 5s - loss: 0.1076 - accuracy: 0.9737 - val_loss: 0.3491 - val_accuracy: 0.9198
-Epoch 897/1000
-60000/60000 - 5s - loss: 0.1075 - accuracy: 0.9737 - val_loss: 0.3494 - val_accuracy: 0.9195
-Epoch 898/1000
-60000/60000 - 5s - loss: 0.1075 - accuracy: 0.9736 - val_loss: 0.3499 - val_accuracy: 0.9192
-Epoch 899/1000
-60000/60000 - 5s - loss: 0.1074 - accuracy: 0.9737 - val_loss: 0.3493 - val_accuracy: 0.9195
-Epoch 900/1000
-60000/60000 - 5s - loss: 0.1074 - accuracy: 0.9740 - val_loss: 0.3498 - val_accuracy: 0.9191
-Epoch 901/1000
-60000/60000 - 5s - loss: 0.1074 - accuracy: 0.9737 - val_loss: 0.3498 - val_accuracy: 0.9196
-Epoch 902/1000
-60000/60000 - 5s - loss: 0.1074 - accuracy: 0.9738 - val_loss: 0.3501 - val_accuracy: 0.9195
-Epoch 903/1000
-60000/60000 - 5s - loss: 0.1074 - accuracy: 0.9739 - val_loss: 0.3500 - val_accuracy: 0.9193
-Epoch 904/1000
-60000/60000 - 5s - loss: 0.1074 - accuracy: 0.9737 - val_loss: 0.3498 - val_accuracy: 0.9196
-Epoch 905/1000
-60000/60000 - 5s - loss: 0.1073 - accuracy: 0.9736 - val_loss: 0.3500 - val_accuracy: 0.9203
-Epoch 906/1000
-60000/60000 - 5s - loss: 0.1073 - accuracy: 0.9737 - val_loss: 0.3504 - val_accuracy: 0.9198
-Epoch 907/1000
-60000/60000 - 5s - loss: 0.1072 - accuracy: 0.9740 - val_loss: 0.3507 - val_accuracy: 0.9203
-Epoch 908/1000
-60000/60000 - 5s - loss: 0.1073 - accuracy: 0.9739 - val_loss: 0.3507 - val_accuracy: 0.9196
-Epoch 909/1000
-60000/60000 - 5s - loss: 0.1073 - accuracy: 0.9737 - val_loss: 0.3499 - val_accuracy: 0.9198
-Epoch 910/1000
-60000/60000 - 5s - loss: 0.1072 - accuracy: 0.9739 - val_loss: 0.3507 - val_accuracy: 0.9197
-Epoch 911/1000
-60000/60000 - 5s - loss: 0.1072 - accuracy: 0.9738 - val_loss: 0.3505 - val_accuracy: 0.9189
-Epoch 912/1000
-60000/60000 - 5s - loss: 0.1072 - accuracy: 0.9739 - val_loss: 0.3507 - val_accuracy: 0.9197
-Epoch 913/1000
-60000/60000 - 5s - loss: 0.1071 - accuracy: 0.9737 - val_loss: 0.3502 - val_accuracy: 0.9196
-Epoch 914/1000
-60000/60000 - 5s - loss: 0.1071 - accuracy: 0.9737 - val_loss: 0.3506 - val_accuracy: 0.9195
-Epoch 915/1000
-60000/60000 - 5s - loss: 0.1071 - accuracy: 0.9736 - val_loss: 0.3511 - val_accuracy: 0.9199
-Epoch 916/1000
-60000/60000 - 5s - loss: 0.1070 - accuracy: 0.9739 - val_loss: 0.3504 - val_accuracy: 0.9192
-Epoch 917/1000
-60000/60000 - 5s - loss: 0.1071 - accuracy: 0.9738 - val_loss: 0.3509 - val_accuracy: 0.9199
-Epoch 918/1000
-60000/60000 - 5s - loss: 0.1070 - accuracy: 0.9740 - val_loss: 0.3509 - val_accuracy: 0.9189
-Epoch 919/1000
-60000/60000 - 5s - loss: 0.1070 - accuracy: 0.9740 - val_loss: 0.3506 - val_accuracy: 0.9193
-Epoch 920/1000
-60000/60000 - 5s - loss: 0.1069 - accuracy: 0.9742 - val_loss: 0.3506 - val_accuracy: 0.9201
-Epoch 921/1000
-60000/60000 - 5s - loss: 0.1069 - accuracy: 0.9740 - val_loss: 0.3512 - val_accuracy: 0.9191
-Epoch 922/1000
-60000/60000 - 5s - loss: 0.1069 - accuracy: 0.9739 - val_loss: 0.3515 - val_accuracy: 0.9196
-Epoch 923/1000
-60000/60000 - 5s - loss: 0.1069 - accuracy: 0.9739 - val_loss: 0.3505 - val_accuracy: 0.9197
-Epoch 924/1000
-60000/60000 - 5s - loss: 0.1068 - accuracy: 0.9741 - val_loss: 0.3515 - val_accuracy: 0.9189
-Epoch 925/1000
-60000/60000 - 5s - loss: 0.1068 - accuracy: 0.9740 - val_loss: 0.3516 - val_accuracy: 0.9187
-Epoch 926/1000
-60000/60000 - 5s - loss: 0.1068 - accuracy: 0.9737 - val_loss: 0.3508 - val_accuracy: 0.9192
-Epoch 927/1000
-60000/60000 - 5s - loss: 0.1068 - accuracy: 0.9739 - val_loss: 0.3518 - val_accuracy: 0.9195
-Epoch 928/1000
-60000/60000 - 5s - loss: 0.1067 - accuracy: 0.9742 - val_loss: 0.3518 - val_accuracy: 0.9190
-Epoch 929/1000
-60000/60000 - 5s - loss: 0.1067 - accuracy: 0.9739 - val_loss: 0.3515 - val_accuracy: 0.9190
-Epoch 930/1000
-60000/60000 - 5s - loss: 0.1067 - accuracy: 0.9740 - val_loss: 0.3514 - val_accuracy: 0.9196
-Epoch 931/1000
-60000/60000 - 5s - loss: 0.1067 - accuracy: 0.9741 - val_loss: 0.3517 - val_accuracy: 0.9196
-Epoch 932/1000
-60000/60000 - 5s - loss: 0.1067 - accuracy: 0.9740 - val_loss: 0.3520 - val_accuracy: 0.9191
-Epoch 933/1000
-60000/60000 - 5s - loss: 0.1066 - accuracy: 0.9742 - val_loss: 0.3520 - val_accuracy: 0.9194
-Epoch 934/1000
-60000/60000 - 5s - loss: 0.1066 - accuracy: 0.9741 - val_loss: 0.3518 - val_accuracy: 0.9186
-Epoch 935/1000
-60000/60000 - 5s - loss: 0.1065 - accuracy: 0.9741 - val_loss: 0.3522 - val_accuracy: 0.9199
-Epoch 936/1000
-60000/60000 - 5s - loss: 0.1066 - accuracy: 0.9739 - val_loss: 0.3519 - val_accuracy: 0.9189
-Epoch 937/1000
-60000/60000 - 5s - loss: 0.1066 - accuracy: 0.9743 - val_loss: 0.3525 - val_accuracy: 0.9190
-Epoch 938/1000
-60000/60000 - 5s - loss: 0.1065 - accuracy: 0.9742 - val_loss: 0.3522 - val_accuracy: 0.9193
-Epoch 939/1000
-60000/60000 - 5s - loss: 0.1064 - accuracy: 0.9740 - val_loss: 0.3518 - val_accuracy: 0.9197
-Epoch 940/1000
-60000/60000 - 5s - loss: 0.1065 - accuracy: 0.9741 - val_loss: 0.3520 - val_accuracy: 0.9193
-Epoch 941/1000
-60000/60000 - 5s - loss: 0.1064 - accuracy: 0.9740 - val_loss: 0.3519 - val_accuracy: 0.9196
-Epoch 942/1000
-60000/60000 - 5s - loss: 0.1065 - accuracy: 0.9741 - val_loss: 0.3526 - val_accuracy: 0.9193
-Epoch 943/1000
-60000/60000 - 5s - loss: 0.1064 - accuracy: 0.9741 - val_loss: 0.3519 - val_accuracy: 0.9190
-Epoch 944/1000
-60000/60000 - 5s - loss: 0.1063 - accuracy: 0.9741 - val_loss: 0.3527 - val_accuracy: 0.9191
-Epoch 945/1000
-60000/60000 - 5s - loss: 0.1064 - accuracy: 0.9741 - val_loss: 0.3525 - val_accuracy: 0.9191
-Epoch 946/1000
-60000/60000 - 5s - loss: 0.1063 - accuracy: 0.9740 - val_loss: 0.3532 - val_accuracy: 0.9185
-Epoch 947/1000
-60000/60000 - 5s - loss: 0.1063 - accuracy: 0.9741 - val_loss: 0.3522 - val_accuracy: 0.9196
-Epoch 948/1000
-60000/60000 - 5s - loss: 0.1063 - accuracy: 0.9741 - val_loss: 0.3529 - val_accuracy: 0.9186
-Epoch 949/1000
-60000/60000 - 5s - loss: 0.1063 - accuracy: 0.9742 - val_loss: 0.3527 - val_accuracy: 0.9186
-Epoch 950/1000
-60000/60000 - 5s - loss: 0.1062 - accuracy: 0.9741 - val_loss: 0.3531 - val_accuracy: 0.9190
-Epoch 951/1000
-60000/60000 - 5s - loss: 0.1062 - accuracy: 0.9742 - val_loss: 0.3533 - val_accuracy: 0.9189
-Epoch 952/1000
-60000/60000 - 5s - loss: 0.1062 - accuracy: 0.9743 - val_loss: 0.3529 - val_accuracy: 0.9186
-Epoch 953/1000
-60000/60000 - 5s - loss: 0.1062 - accuracy: 0.9743 - val_loss: 0.3530 - val_accuracy: 0.9190
-Epoch 954/1000
-60000/60000 - 5s - loss: 0.1062 - accuracy: 0.9740 - val_loss: 0.3530 - val_accuracy: 0.9189
-Epoch 955/1000
-60000/60000 - 5s - loss: 0.1061 - accuracy: 0.9742 - val_loss: 0.3535 - val_accuracy: 0.9187
-Epoch 956/1000
-60000/60000 - 5s - loss: 0.1061 - accuracy: 0.9741 - val_loss: 0.3533 - val_accuracy: 0.9190
-Epoch 957/1000
-60000/60000 - 5s - loss: 0.1061 - accuracy: 0.9741 - val_loss: 0.3534 - val_accuracy: 0.9194
-Epoch 958/1000
-60000/60000 - 5s - loss: 0.1060 - accuracy: 0.9742 - val_loss: 0.3531 - val_accuracy: 0.9195
-Epoch 959/1000
-60000/60000 - 5s - loss: 0.1060 - accuracy: 0.9741 - val_loss: 0.3532 - val_accuracy: 0.9192
-Epoch 960/1000
-60000/60000 - 5s - loss: 0.1061 - accuracy: 0.9741 - val_loss: 0.3534 - val_accuracy: 0.9188
-Epoch 961/1000
-60000/60000 - 5s - loss: 0.1059 - accuracy: 0.9743 - val_loss: 0.3538 - val_accuracy: 0.9193
-Epoch 962/1000
-60000/60000 - 5s - loss: 0.1060 - accuracy: 0.9742 - val_loss: 0.3540 - val_accuracy: 0.9190
-Epoch 963/1000
-60000/60000 - 5s - loss: 0.1059 - accuracy: 0.9743 - val_loss: 0.3541 - val_accuracy: 0.9192
-Epoch 964/1000
-60000/60000 - 5s - loss: 0.1059 - accuracy: 0.9742 - val_loss: 0.3537 - val_accuracy: 0.9189
-Epoch 965/1000
-60000/60000 - 5s - loss: 0.1059 - accuracy: 0.9743 - val_loss: 0.3539 - val_accuracy: 0.9189
-Epoch 966/1000
-60000/60000 - 5s - loss: 0.1059 - accuracy: 0.9742 - val_loss: 0.3537 - val_accuracy: 0.9183
-Epoch 967/1000
-60000/60000 - 5s - loss: 0.1059 - accuracy: 0.9741 - val_loss: 0.3538 - val_accuracy: 0.9186
-Epoch 968/1000
-60000/60000 - 5s - loss: 0.1058 - accuracy: 0.9740 - val_loss: 0.3539 - val_accuracy: 0.9187
-Epoch 969/1000
-60000/60000 - 5s - loss: 0.1058 - accuracy: 0.9743 - val_loss: 0.3542 - val_accuracy: 0.9187
-Epoch 970/1000
-60000/60000 - 5s - loss: 0.1058 - accuracy: 0.9742 - val_loss: 0.3540 - val_accuracy: 0.9193
-Epoch 971/1000
-60000/60000 - 5s - loss: 0.1058 - accuracy: 0.9742 - val_loss: 0.3540 - val_accuracy: 0.9189
-Epoch 972/1000
-60000/60000 - 5s - loss: 0.1058 - accuracy: 0.9742 - val_loss: 0.3539 - val_accuracy: 0.9185
-Epoch 973/1000
-60000/60000 - 5s - loss: 0.1057 - accuracy: 0.9744 - val_loss: 0.3542 - val_accuracy: 0.9185
-Epoch 974/1000
-60000/60000 - 5s - loss: 0.1057 - accuracy: 0.9743 - val_loss: 0.3544 - val_accuracy: 0.9196
-Epoch 975/1000
-60000/60000 - 5s - loss: 0.1056 - accuracy: 0.9744 - val_loss: 0.3540 - val_accuracy: 0.9186
-Epoch 976/1000
-60000/60000 - 5s - loss: 0.1056 - accuracy: 0.9741 - val_loss: 0.3544 - val_accuracy: 0.9187
-Epoch 977/1000
-60000/60000 - 5s - loss: 0.1057 - accuracy: 0.9741 - val_loss: 0.3544 - val_accuracy: 0.9186
-Epoch 978/1000
-60000/60000 - 5s - loss: 0.1057 - accuracy: 0.9744 - val_loss: 0.3543 - val_accuracy: 0.9186
-Epoch 979/1000
-60000/60000 - 5s - loss: 0.1056 - accuracy: 0.9743 - val_loss: 0.3544 - val_accuracy: 0.9188
-Epoch 980/1000
-60000/60000 - 5s - loss: 0.1055 - accuracy: 0.9743 - val_loss: 0.3545 - val_accuracy: 0.9194
-Epoch 981/1000
-60000/60000 - 5s - loss: 0.1056 - accuracy: 0.9745 - val_loss: 0.3543 - val_accuracy: 0.9183
-Epoch 982/1000
-60000/60000 - 5s - loss: 0.1056 - accuracy: 0.9742 - val_loss: 0.3548 - val_accuracy: 0.9186
-Epoch 983/1000
-60000/60000 - 5s - loss: 0.1055 - accuracy: 0.9741 - val_loss: 0.3546 - val_accuracy: 0.9182
-Epoch 984/1000
-60000/60000 - 5s - loss: 0.1055 - accuracy: 0.9744 - val_loss: 0.3555 - val_accuracy: 0.9184
-Epoch 985/1000
-60000/60000 - 5s - loss: 0.1054 - accuracy: 0.9744 - val_loss: 0.3539 - val_accuracy: 0.9184
-Epoch 986/1000
-60000/60000 - 5s - loss: 0.1055 - accuracy: 0.9742 - val_loss: 0.3550 - val_accuracy: 0.9188
-Epoch 987/1000
-60000/60000 - 5s - loss: 0.1054 - accuracy: 0.9745 - val_loss: 0.3545 - val_accuracy: 0.9190
-Epoch 988/1000
-60000/60000 - 5s - loss: 0.1054 - accuracy: 0.9743 - val_loss: 0.3547 - val_accuracy: 0.9182
-Epoch 989/1000
-60000/60000 - 5s - loss: 0.1054 - accuracy: 0.9743 - val_loss: 0.3552 - val_accuracy: 0.9186
-Epoch 990/1000
-60000/60000 - 5s - loss: 0.1054 - accuracy: 0.9743 - val_loss: 0.3551 - val_accuracy: 0.9182
-Epoch 991/1000
-60000/60000 - 5s - loss: 0.1053 - accuracy: 0.9742 - val_loss: 0.3552 - val_accuracy: 0.9186
-Epoch 992/1000
-60000/60000 - 5s - loss: 0.1054 - accuracy: 0.9744 - val_loss: 0.3552 - val_accuracy: 0.9185
-Epoch 993/1000
-60000/60000 - 5s - loss: 0.1053 - accuracy: 0.9746 - val_loss: 0.3550 - val_accuracy: 0.9183
-Epoch 994/1000
-60000/60000 - 5s - loss: 0.1054 - accuracy: 0.9744 - val_loss: 0.3557 - val_accuracy: 0.9185
-Epoch 995/1000
-60000/60000 - 5s - loss: 0.1054 - accuracy: 0.9745 - val_loss: 0.3552 - val_accuracy: 0.9184
-Epoch 996/1000
-60000/60000 - 5s - loss: 0.1053 - accuracy: 0.9745 - val_loss: 0.3548 - val_accuracy: 0.9183
-Epoch 997/1000
-60000/60000 - 5s - loss: 0.1053 - accuracy: 0.9743 - val_loss: 0.3552 - val_accuracy: 0.9186
-Epoch 998/1000
-60000/60000 - 5s - loss: 0.1052 - accuracy: 0.9746 - val_loss: 0.3557 - val_accuracy: 0.9185
-Epoch 999/1000
-60000/60000 - 5s - loss: 0.1053 - accuracy: 0.9743 - val_loss: 0.3553 - val_accuracy: 0.9181
-Epoch 1000/1000
-60000/60000 - 5s - loss: 0.1051 - accuracy: 0.9744 - val_loss: 0.3556 - val_accuracy: 0.9189
-Test loss was 0.2972, test accuracy was 0.9239
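
(The per-epoch lines above match the tf.keras `fit(..., verbose=2)` output of the TF 1.x / early TF 2.0 era, which reports sample counts like `60000/60000`, and the closing "Test loss was ..." line looks like a custom print after `model.evaluate`. For reference, here is a minimal sketch that would emit logs of this shape; the hidden-layer width, activation placement, optimizer, batch size, and the choice of the MNIST test set as validation data are illustrative assumptions, not read from the repository.)

import tensorflow as tf

# Load MNIST and flatten to 784-dim vectors, matching "X_train shape: (60000, 784)".
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
x_train = x_train.reshape(-1, 784).astype("float32") / 255.0
x_test = x_test.reshape(-1, 784).astype("float32") / 255.0
print("X_train shape:", x_train.shape)
print(len(x_train), "train samples")
print(len(x_test), "test samples")

# Small sigmoid MLP; the 100-unit width is an assumption made for illustration.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(100, activation="sigmoid", input_shape=(784,)),
    tf.keras.layers.Dense(10, activation="softmax"),
])
# Plain SGD is an assumption; only the 1000-epoch budget comes from the log.
model.compile(
    optimizer=tf.keras.optimizers.SGD(learning_rate=0.01),
    loss="sparse_categorical_crossentropy",
    metrics=["accuracy"],
)

# verbose=2 prints one "loss ... accuracy ... val_loss ... val_accuracy" line
# per epoch, as in the deleted logs. Which held-out 10000-sample set served as
# validation data is an assumption here.
model.fit(
    x_train, y_train,
    epochs=1000, batch_size=100, verbose=2,
    validation_data=(x_test, y_test),
)

# Final summary line in the same wording as the log's last entry.
loss, acc = model.evaluate(x_test, y_test, verbose=0)
print(f"Test loss was {loss:.4f}, test accuracy was {acc:.4f}")
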
diff --git a/MNIST/nonlinear_withsigmoid.txt b/MNIST/nonlinear_withsigmoid.txt
deleted file mode 100644
index efa6d8c..0000000
--- a/MNIST/nonlinear_withsigmoid.txt
+++ /dev/null
@@ -1,2005 +0,0 @@
-X_train shape: (60000, 784)
-60000 train samples
-10000 test samples
-Train on 60000 samples, validate on 10000 samples
-Epoch 1/1000
-60000/60000 - 9s - loss: 0.6632 - accuracy: 0.8073 - val_loss: 0.4125 - val_accuracy: 0.8811
-Epoch 2/1000
-60000/60000 - 6s - loss: 0.3864 - accuracy: 0.8889 - val_loss: 0.3437 - val_accuracy: 0.9008
-Epoch 3/1000
-60000/60000 - 6s - loss: 0.3298 - accuracy: 0.9053 - val_loss: 0.3090 - val_accuracy: 0.9077
-Epoch 4/1000
-60000/60000 - 6s - loss: 0.2969 - accuracy: 0.9142 - val_loss: 0.2858 - val_accuracy: 0.9154
-Epoch 5/1000
-60000/60000 - 6s - loss: 0.2737 - accuracy: 0.9216 - val_loss: 0.2694 - val_accuracy: 0.9222
-Epoch 6/1000
-60000/60000 - 6s - loss: 0.2557 - accuracy: 0.9260 - val_loss: 0.2568 - val_accuracy: 0.9241
-Epoch 7/1000
-60000/60000 - 6s - loss: 0.2413 - accuracy: 0.9311 - val_loss: 0.2480 - val_accuracy: 0.9266
-Epoch 8/1000
-60000/60000 - 6s - loss: 0.2296 - accuracy: 0.9346 - val_loss: 0.2381 - val_accuracy: 0.9299
-Epoch 9/1000
-60000/60000 - 6s - loss: 0.2197 - accuracy: 0.9376 - val_loss: 0.2308 - val_accuracy: 0.9308
-Epoch 10/1000
-60000/60000 - 6s - loss: 0.2108 - accuracy: 0.9400 - val_loss: 0.2269 - val_accuracy: 0.9312
-Epoch 11/1000
-60000/60000 - 6s - loss: 0.2033 - accuracy: 0.9421 - val_loss: 0.2207 - val_accuracy: 0.9347
-Epoch 12/1000
-60000/60000 - 6s - loss: 0.1969 - accuracy: 0.9438 - val_loss: 0.2177 - val_accuracy: 0.9346
-Epoch 13/1000
-60000/60000 - 6s - loss: 0.1906 - accuracy: 0.9456 - val_loss: 0.2138 - val_accuracy: 0.9362
-Epoch 14/1000
-60000/60000 - 6s - loss: 0.1852 - accuracy: 0.9470 - val_loss: 0.2101 - val_accuracy: 0.9372
-Epoch 15/1000
-60000/60000 - 6s - loss: 0.1803 - accuracy: 0.9488 - val_loss: 0.2087 - val_accuracy: 0.9374
-Epoch 16/1000
-60000/60000 - 6s - loss: 0.1757 - accuracy: 0.9497 - val_loss: 0.2045 - val_accuracy: 0.9381
-Epoch 17/1000
-60000/60000 - 6s - loss: 0.1715 - accuracy: 0.9506 - val_loss: 0.2035 - val_accuracy: 0.9382
-Epoch 18/1000
-60000/60000 - 6s - loss: 0.1676 - accuracy: 0.9518 - val_loss: 0.2010 - val_accuracy: 0.9388
-Epoch 19/1000
-60000/60000 - 6s - loss: 0.1640 - accuracy: 0.9525 - val_loss: 0.1997 - val_accuracy: 0.9392
-Epoch 20/1000
-60000/60000 - 6s - loss: 0.1605 - accuracy: 0.9537 - val_loss: 0.1985 - val_accuracy: 0.9395
-Epoch 21/1000
-60000/60000 - 6s - loss: 0.1570 - accuracy: 0.9545 - val_loss: 0.1982 - val_accuracy: 0.9389
-Epoch 22/1000
-60000/60000 - 6s - loss: 0.1540 - accuracy: 0.9561 - val_loss: 0.1968 - val_accuracy: 0.9399
-Epoch 23/1000
-60000/60000 - 6s - loss: 0.1511 - accuracy: 0.9567 - val_loss: 0.1947 - val_accuracy: 0.9406
-Epoch 24/1000
-60000/60000 - 6s - loss: 0.1481 - accuracy: 0.9573 - val_loss: 0.1945 - val_accuracy: 0.9405
-Epoch 25/1000
-60000/60000 - 6s - loss: 0.1458 - accuracy: 0.9582 - val_loss: 0.1936 - val_accuracy: 0.9395
-Epoch 26/1000
-60000/60000 - 6s - loss: 0.1430 - accuracy: 0.9591 - val_loss: 0.1927 - val_accuracy: 0.9405
-Epoch 27/1000
-60000/60000 - 6s - loss: 0.1404 - accuracy: 0.9596 - val_loss: 0.1909 - val_accuracy: 0.9411
-Epoch 28/1000
-60000/60000 - 6s - loss: 0.1383 - accuracy: 0.9605 - val_loss: 0.1917 - val_accuracy: 0.9418
-Epoch 29/1000
-60000/60000 - 6s - loss: 0.1362 - accuracy: 0.9608 - val_loss: 0.1896 - val_accuracy: 0.9428
-Epoch 30/1000
-60000/60000 - 6s - loss: 0.1340 - accuracy: 0.9614 - val_loss: 0.1889 - val_accuracy: 0.9427
-Epoch 31/1000
-60000/60000 - 6s - loss: 0.1319 - accuracy: 0.9618 - val_loss: 0.1891 - val_accuracy: 0.9411
-Epoch 32/1000
-60000/60000 - 6s - loss: 0.1297 - accuracy: 0.9632 - val_loss: 0.1893 - val_accuracy: 0.9418
-Epoch 33/1000
-60000/60000 - 6s - loss: 0.1280 - accuracy: 0.9632 - val_loss: 0.1895 - val_accuracy: 0.9428
-Epoch 34/1000
-60000/60000 - 6s - loss: 0.1263 - accuracy: 0.9634 - val_loss: 0.1885 - val_accuracy: 0.9424
-Epoch 35/1000
-60000/60000 - 6s - loss: 0.1243 - accuracy: 0.9646 - val_loss: 0.1885 - val_accuracy: 0.9426
-Epoch 36/1000
-60000/60000 - 6s - loss: 0.1224 - accuracy: 0.9654 - val_loss: 0.1885 - val_accuracy: 0.9425
-Epoch 37/1000
-60000/60000 - 6s - loss: 0.1207 - accuracy: 0.9659 - val_loss: 0.1901 - val_accuracy: 0.9427
-Epoch 38/1000
-60000/60000 - 6s - loss: 0.1191 - accuracy: 0.9662 - val_loss: 0.1872 - val_accuracy: 0.9429
-Epoch 39/1000
-60000/60000 - 6s - loss: 0.1174 - accuracy: 0.9665 - val_loss: 0.1893 - val_accuracy: 0.9418
-Epoch 40/1000
-60000/60000 - 6s - loss: 0.1156 - accuracy: 0.9676 - val_loss: 0.1864 - val_accuracy: 0.9450
-Epoch 41/1000
-60000/60000 - 6s - loss: 0.1145 - accuracy: 0.9675 - val_loss: 0.1871 - val_accuracy: 0.9441
-Epoch 42/1000
-60000/60000 - 6s - loss: 0.1130 - accuracy: 0.9683 - val_loss: 0.1860 - val_accuracy: 0.9441
-Epoch 43/1000
-60000/60000 - 6s - loss: 0.1115 - accuracy: 0.9682 - val_loss: 0.1863 - val_accuracy: 0.9434
-Epoch 44/1000
-60000/60000 - 6s - loss: 0.1104 - accuracy: 0.9689 - val_loss: 0.1870 - val_accuracy: 0.9439
-Epoch 45/1000
-60000/60000 - 6s - loss: 0.1088 - accuracy: 0.9695 - val_loss: 0.1859 - val_accuracy: 0.9449
-Epoch 46/1000
-60000/60000 - 6s - loss: 0.1074 - accuracy: 0.9703 - val_loss: 0.1855 - val_accuracy: 0.9448
-Epoch 47/1000
-60000/60000 - 6s - loss: 0.1063 - accuracy: 0.9703 - val_loss: 0.1842 - val_accuracy: 0.9455
-Epoch 48/1000
-60000/60000 - 6s - loss: 0.1049 - accuracy: 0.9708 - val_loss: 0.1842 - val_accuracy: 0.9461
-Epoch 49/1000
-60000/60000 - 6s - loss: 0.1037 - accuracy: 0.9712 - val_loss: 0.1847 - val_accuracy: 0.9451
-Epoch 50/1000
-60000/60000 - 6s - loss: 0.1026 - accuracy: 0.9713 - val_loss: 0.1845 - val_accuracy: 0.9455
-Epoch 51/1000
-60000/60000 - 6s - loss: 0.1014 - accuracy: 0.9719 - val_loss: 0.1840 - val_accuracy: 0.9460
-Epoch 52/1000
-60000/60000 - 6s - loss: 0.1004 - accuracy: 0.9719 - val_loss: 0.1856 - val_accuracy: 0.9459
-Epoch 53/1000
-60000/60000 - 6s - loss: 0.0991 - accuracy: 0.9726 - val_loss: 0.1858 - val_accuracy: 0.9443
-Epoch 54/1000
-60000/60000 - 6s - loss: 0.0980 - accuracy: 0.9730 - val_loss: 0.1854 - val_accuracy: 0.9465
-Epoch 55/1000
-60000/60000 - 6s - loss: 0.0971 - accuracy: 0.9733 - val_loss: 0.1841 - val_accuracy: 0.9459
-Epoch 56/1000
-60000/60000 - 6s - loss: 0.0960 - accuracy: 0.9736 - val_loss: 0.1860 - val_accuracy: 0.9453
-Epoch 57/1000
-60000/60000 - 6s - loss: 0.0950 - accuracy: 0.9741 - val_loss: 0.1850 - val_accuracy: 0.9451
-Epoch 58/1000
-60000/60000 - 6s - loss: 0.0940 - accuracy: 0.9744 - val_loss: 0.1839 - val_accuracy: 0.9454
-Epoch 59/1000
-60000/60000 - 6s - loss: 0.0930 - accuracy: 0.9745 - val_loss: 0.1839 - val_accuracy: 0.9456
-Epoch 60/1000
-60000/60000 - 6s - loss: 0.0921 - accuracy: 0.9747 - val_loss: 0.1856 - val_accuracy: 0.9462
-Epoch 61/1000
-60000/60000 - 6s - loss: 0.0912 - accuracy: 0.9750 - val_loss: 0.1836 - val_accuracy: 0.9459
-Epoch 62/1000
-60000/60000 - 6s - loss: 0.0902 - accuracy: 0.9755 - val_loss: 0.1851 - val_accuracy: 0.9466
-Epoch 63/1000
-60000/60000 - 6s - loss: 0.0893 - accuracy: 0.9759 - val_loss: 0.1838 - val_accuracy: 0.9458
-Epoch 64/1000
-60000/60000 - 6s - loss: 0.0883 - accuracy: 0.9755 - val_loss: 0.1850 - val_accuracy: 0.9455
-Epoch 65/1000
-60000/60000 - 6s - loss: 0.0878 - accuracy: 0.9766 - val_loss: 0.1849 - val_accuracy: 0.9455
-Epoch 66/1000
-60000/60000 - 6s - loss: 0.0868 - accuracy: 0.9769 - val_loss: 0.1843 - val_accuracy: 0.9460
-Epoch 67/1000
-60000/60000 - 6s - loss: 0.0860 - accuracy: 0.9766 - val_loss: 0.1848 - val_accuracy: 0.9458
-Epoch 68/1000
-60000/60000 - 6s - loss: 0.0852 - accuracy: 0.9772 - val_loss: 0.1855 - val_accuracy: 0.9464
-Epoch 69/1000
-60000/60000 - 6s - loss: 0.0842 - accuracy: 0.9773 - val_loss: 0.1871 - val_accuracy: 0.9439
-Epoch 70/1000
-60000/60000 - 6s - loss: 0.0835 - accuracy: 0.9774 - val_loss: 0.1877 - val_accuracy: 0.9448
-Epoch 71/1000
-60000/60000 - 6s - loss: 0.0827 - accuracy: 0.9778 - val_loss: 0.1849 - val_accuracy: 0.9466
-Epoch 72/1000
-60000/60000 - 6s - loss: 0.0821 - accuracy: 0.9778 - val_loss: 0.1845 - val_accuracy: 0.9465
-Epoch 73/1000
-60000/60000 - 6s - loss: 0.0815 - accuracy: 0.9782 - val_loss: 0.1866 - val_accuracy: 0.9454
-Epoch 74/1000
-60000/60000 - 6s - loss: 0.0806 - accuracy: 0.9784 - val_loss: 0.1872 - val_accuracy: 0.9453
-Epoch 75/1000
-60000/60000 - 6s - loss: 0.0797 - accuracy: 0.9786 - val_loss: 0.1858 - val_accuracy: 0.9459
-Epoch 76/1000
-60000/60000 - 6s - loss: 0.0792 - accuracy: 0.9788 - val_loss: 0.1861 - val_accuracy: 0.9460
-Epoch 77/1000
-60000/60000 - 6s - loss: 0.0784 - accuracy: 0.9789 - val_loss: 0.1896 - val_accuracy: 0.9451
-Epoch 78/1000
-60000/60000 - 6s - loss: 0.0779 - accuracy: 0.9795 - val_loss: 0.1876 - val_accuracy: 0.9461
-Epoch 79/1000
-60000/60000 - 6s - loss: 0.0771 - accuracy: 0.9797 - val_loss: 0.1884 - val_accuracy: 0.9458
-Epoch 80/1000
-60000/60000 - 6s - loss: 0.0766 - accuracy: 0.9797 - val_loss: 0.1877 - val_accuracy: 0.9458
-Epoch 81/1000
-60000/60000 - 6s - loss: 0.0759 - accuracy: 0.9798 - val_loss: 0.1873 - val_accuracy: 0.9468
-Epoch 82/1000
-60000/60000 - 6s - loss: 0.0751 - accuracy: 0.9804 - val_loss: 0.1892 - val_accuracy: 0.9458
-Epoch 83/1000
-60000/60000 - 6s - loss: 0.0745 - accuracy: 0.9803 - val_loss: 0.1885 - val_accuracy: 0.9461
-Epoch 84/1000
-60000/60000 - 6s - loss: 0.0740 - accuracy: 0.9811 - val_loss: 0.1882 - val_accuracy: 0.9466
-Epoch 85/1000
-60000/60000 - 6s - loss: 0.0732 - accuracy: 0.9812 - val_loss: 0.1876 - val_accuracy: 0.9471
-Epoch 86/1000
-60000/60000 - 6s - loss: 0.0726 - accuracy: 0.9814 - val_loss: 0.1899 - val_accuracy: 0.9458
-Epoch 87/1000
-60000/60000 - 6s - loss: 0.0720 - accuracy: 0.9812 - val_loss: 0.1908 - val_accuracy: 0.9462
-Epoch 88/1000
-60000/60000 - 6s - loss: 0.0715 - accuracy: 0.9815 - val_loss: 0.1909 - val_accuracy: 0.9452
-Epoch 89/1000
-60000/60000 - 6s - loss: 0.0710 - accuracy: 0.9816 - val_loss: 0.1906 - val_accuracy: 0.9463
-Epoch 90/1000
-60000/60000 - 6s - loss: 0.0704 - accuracy: 0.9820 - val_loss: 0.1909 - val_accuracy: 0.9465
-Epoch 91/1000
-60000/60000 - 6s - loss: 0.0697 - accuracy: 0.9818 - val_loss: 0.1896 - val_accuracy: 0.9457
-Epoch 92/1000
-60000/60000 - 6s - loss: 0.0694 - accuracy: 0.9819 - val_loss: 0.1915 - val_accuracy: 0.9469
-Epoch 93/1000
-60000/60000 - 6s - loss: 0.0686 - accuracy: 0.9824 - val_loss: 0.1933 - val_accuracy: 0.9454
-Epoch 94/1000
-60000/60000 - 6s - loss: 0.0681 - accuracy: 0.9828 - val_loss: 0.1926 - val_accuracy: 0.9459
-Epoch 95/1000
-60000/60000 - 6s - loss: 0.0677 - accuracy: 0.9825 - val_loss: 0.1926 - val_accuracy: 0.9457
-Epoch 96/1000
-60000/60000 - 6s - loss: 0.0672 - accuracy: 0.9826 - val_loss: 0.1923 - val_accuracy: 0.9462
-Epoch 97/1000
-60000/60000 - 6s - loss: 0.0667 - accuracy: 0.9831 - val_loss: 0.1924 - val_accuracy: 0.9457
-Epoch 98/1000
-60000/60000 - 6s - loss: 0.0661 - accuracy: 0.9833 - val_loss: 0.1951 - val_accuracy: 0.9452
-Epoch 99/1000
-60000/60000 - 6s - loss: 0.0657 - accuracy: 0.9833 - val_loss: 0.1931 - val_accuracy: 0.9467
-Epoch 100/1000
-60000/60000 - 6s - loss: 0.0651 - accuracy: 0.9833 - val_loss: 0.1951 - val_accuracy: 0.9468
-Epoch 101/1000
-60000/60000 - 6s - loss: 0.0645 - accuracy: 0.9835 - val_loss: 0.1939 - val_accuracy: 0.9457
-Epoch 102/1000
-60000/60000 - 6s - loss: 0.0641 - accuracy: 0.9837 - val_loss: 0.1962 - val_accuracy: 0.9462
-Epoch 103/1000
-60000/60000 - 6s - loss: 0.0637 - accuracy: 0.9834 - val_loss: 0.1946 - val_accuracy: 0.9461
-Epoch 104/1000
-60000/60000 - 6s - loss: 0.0633 - accuracy: 0.9839 - val_loss: 0.1962 - val_accuracy: 0.9463
-Epoch 105/1000
-60000/60000 - 6s - loss: 0.0628 - accuracy: 0.9842 - val_loss: 0.1972 - val_accuracy: 0.9472
-Epoch 106/1000
-60000/60000 - 6s - loss: 0.0624 - accuracy: 0.9843 - val_loss: 0.1986 - val_accuracy: 0.9454
-Epoch 107/1000
-60000/60000 - 6s - loss: 0.0618 - accuracy: 0.9842 - val_loss: 0.1967 - val_accuracy: 0.9456
-Epoch 108/1000
-60000/60000 - 6s - loss: 0.0615 - accuracy: 0.9842 - val_loss: 0.1989 - val_accuracy: 0.9457
-Epoch 109/1000
-60000/60000 - 6s - loss: 0.0610 - accuracy: 0.9845 - val_loss: 0.1980 - val_accuracy: 0.9455
-Epoch 110/1000
-60000/60000 - 6s - loss: 0.0605 - accuracy: 0.9848 - val_loss: 0.1977 - val_accuracy: 0.9459
-Epoch 111/1000
-60000/60000 - 6s - loss: 0.0602 - accuracy: 0.9850 - val_loss: 0.2003 - val_accuracy: 0.9459
-Epoch 112/1000
-60000/60000 - 6s - loss: 0.0599 - accuracy: 0.9852 - val_loss: 0.1989 - val_accuracy: 0.9456
-Epoch 113/1000
-60000/60000 - 6s - loss: 0.0592 - accuracy: 0.9852 - val_loss: 0.2008 - val_accuracy: 0.9451
-Epoch 114/1000
-60000/60000 - 6s - loss: 0.0587 - accuracy: 0.9855 - val_loss: 0.1997 - val_accuracy: 0.9461
-Epoch 115/1000
-60000/60000 - 7s - loss: 0.0584 - accuracy: 0.9854 - val_loss: 0.2018 - val_accuracy: 0.9448
-Epoch 116/1000
-60000/60000 - 6s - loss: 0.0580 - accuracy: 0.9859 - val_loss: 0.2004 - val_accuracy: 0.9452
-Epoch 117/1000
-60000/60000 - 6s - loss: 0.0578 - accuracy: 0.9858 - val_loss: 0.2032 - val_accuracy: 0.9446
-Epoch 118/1000
-60000/60000 - 6s - loss: 0.0573 - accuracy: 0.9861 - val_loss: 0.2016 - val_accuracy: 0.9463
-Epoch 119/1000
-60000/60000 - 7s - loss: 0.0569 - accuracy: 0.9861 - val_loss: 0.2030 - val_accuracy: 0.9458
-Epoch 120/1000
-60000/60000 - 6s - loss: 0.0565 - accuracy: 0.9861 - val_loss: 0.2018 - val_accuracy: 0.9459
-Epoch 121/1000
-60000/60000 - 6s - loss: 0.0563 - accuracy: 0.9864 - val_loss: 0.2031 - val_accuracy: 0.9454
-Epoch 122/1000
-60000/60000 - 6s - loss: 0.0558 - accuracy: 0.9864 - val_loss: 0.2037 - val_accuracy: 0.9458
-Epoch 123/1000
-60000/60000 - 7s - loss: 0.0554 - accuracy: 0.9869 - val_loss: 0.2042 - val_accuracy: 0.9449
-Epoch 124/1000
-60000/60000 - 7s - loss: 0.0551 - accuracy: 0.9870 - val_loss: 0.2060 - val_accuracy: 0.9454
-Epoch 125/1000
-60000/60000 - 7s - loss: 0.0546 - accuracy: 0.9871 - val_loss: 0.2045 - val_accuracy: 0.9449
-Epoch 126/1000
-60000/60000 - 7s - loss: 0.0544 - accuracy: 0.9872 - val_loss: 0.2069 - val_accuracy: 0.9456
-Epoch 127/1000
-60000/60000 - 6s - loss: 0.0540 - accuracy: 0.9872 - val_loss: 0.2049 - val_accuracy: 0.9449
-Epoch 128/1000
-60000/60000 - 6s - loss: 0.0536 - accuracy: 0.9875 - val_loss: 0.2060 - val_accuracy: 0.9448
-Epoch 129/1000
-60000/60000 - 6s - loss: 0.0534 - accuracy: 0.9875 - val_loss: 0.2068 - val_accuracy: 0.9446
-Epoch 130/1000
-60000/60000 - 6s - loss: 0.0529 - accuracy: 0.9875 - val_loss: 0.2060 - val_accuracy: 0.9454
-Epoch 131/1000
-60000/60000 - 6s - loss: 0.0528 - accuracy: 0.9877 - val_loss: 0.2070 - val_accuracy: 0.9446
-Epoch 132/1000
-60000/60000 - 6s - loss: 0.0523 - accuracy: 0.9880 - val_loss: 0.2076 - val_accuracy: 0.9447
-Epoch 133/1000
-60000/60000 - 6s - loss: 0.0519 - accuracy: 0.9880 - val_loss: 0.2086 - val_accuracy: 0.9455
-Epoch 134/1000
-60000/60000 - 6s - loss: 0.0516 - accuracy: 0.9880 - val_loss: 0.2088 - val_accuracy: 0.9440
-Epoch 135/1000
-60000/60000 - 6s - loss: 0.0512 - accuracy: 0.9884 - val_loss: 0.2099 - val_accuracy: 0.9439
-Epoch 136/1000
-60000/60000 - 6s - loss: 0.0510 - accuracy: 0.9883 - val_loss: 0.2101 - val_accuracy: 0.9441
-Epoch 137/1000
-60000/60000 - 6s - loss: 0.0508 - accuracy: 0.9887 - val_loss: 0.2080 - val_accuracy: 0.9451
-Epoch 138/1000
-60000/60000 - 6s - loss: 0.0503 - accuracy: 0.9888 - val_loss: 0.2104 - val_accuracy: 0.9442
-Epoch 139/1000
-60000/60000 - 6s - loss: 0.0501 - accuracy: 0.9887 - val_loss: 0.2107 - val_accuracy: 0.9450
-Epoch 140/1000
-60000/60000 - 6s - loss: 0.0498 - accuracy: 0.9888 - val_loss: 0.2108 - val_accuracy: 0.9443
-Epoch 141/1000
-60000/60000 - 6s - loss: 0.0495 - accuracy: 0.9888 - val_loss: 0.2112 - val_accuracy: 0.9442
-Epoch 142/1000
-60000/60000 - 6s - loss: 0.0491 - accuracy: 0.9891 - val_loss: 0.2119 - val_accuracy: 0.9445
-Epoch 143/1000
-60000/60000 - 6s - loss: 0.0489 - accuracy: 0.9888 - val_loss: 0.2118 - val_accuracy: 0.9454
-Epoch 144/1000
-60000/60000 - 6s - loss: 0.0486 - accuracy: 0.9891 - val_loss: 0.2138 - val_accuracy: 0.9439
-Epoch 145/1000
-60000/60000 - 6s - loss: 0.0482 - accuracy: 0.9892 - val_loss: 0.2140 - val_accuracy: 0.9430
-Epoch 146/1000
-60000/60000 - 6s - loss: 0.0480 - accuracy: 0.9893 - val_loss: 0.2134 - val_accuracy: 0.9445
-Epoch 147/1000
-60000/60000 - 6s - loss: 0.0477 - accuracy: 0.9898 - val_loss: 0.2148 - val_accuracy: 0.9439
-Epoch 148/1000
-60000/60000 - 6s - loss: 0.0474 - accuracy: 0.9895 - val_loss: 0.2148 - val_accuracy: 0.9448
-Epoch 149/1000
-60000/60000 - 6s - loss: 0.0472 - accuracy: 0.9897 - val_loss: 0.2156 - val_accuracy: 0.9439
-Epoch 150/1000
-60000/60000 - 6s - loss: 0.0469 - accuracy: 0.9898 - val_loss: 0.2158 - val_accuracy: 0.9432
-Epoch 151/1000
-60000/60000 - 6s - loss: 0.0467 - accuracy: 0.9898 - val_loss: 0.2154 - val_accuracy: 0.9437
-Epoch 152/1000
-60000/60000 - 6s - loss: 0.0463 - accuracy: 0.9901 - val_loss: 0.2177 - val_accuracy: 0.9430
-Epoch 153/1000
-60000/60000 - 6s - loss: 0.0463 - accuracy: 0.9899 - val_loss: 0.2162 - val_accuracy: 0.9440
-Epoch 154/1000
-60000/60000 - 6s - loss: 0.0459 - accuracy: 0.9902 - val_loss: 0.2164 - val_accuracy: 0.9433
-Epoch 155/1000
-60000/60000 - 6s - loss: 0.0456 - accuracy: 0.9901 - val_loss: 0.2167 - val_accuracy: 0.9425
-Epoch 156/1000
-60000/60000 - 6s - loss: 0.0453 - accuracy: 0.9904 - val_loss: 0.2184 - val_accuracy: 0.9431
-Epoch 157/1000
-60000/60000 - 6s - loss: 0.0452 - accuracy: 0.9906 - val_loss: 0.2185 - val_accuracy: 0.9430
-Epoch 158/1000
-60000/60000 - 6s - loss: 0.0448 - accuracy: 0.9906 - val_loss: 0.2181 - val_accuracy: 0.9429
-Epoch 159/1000
-60000/60000 - 6s - loss: 0.0445 - accuracy: 0.9905 - val_loss: 0.2197 - val_accuracy: 0.9427
-Epoch 160/1000
-60000/60000 - 6s - loss: 0.0444 - accuracy: 0.9905 - val_loss: 0.2215 - val_accuracy: 0.9427
-Epoch 161/1000
-60000/60000 - 6s - loss: 0.0441 - accuracy: 0.9907 - val_loss: 0.2200 - val_accuracy: 0.9427
-Epoch 162/1000
-60000/60000 - 6s - loss: 0.0438 - accuracy: 0.9913 - val_loss: 0.2215 - val_accuracy: 0.9423
-Epoch 163/1000
-60000/60000 - 6s - loss: 0.0436 - accuracy: 0.9908 - val_loss: 0.2194 - val_accuracy: 0.9437
-Epoch 164/1000
-60000/60000 - 6s - loss: 0.0434 - accuracy: 0.9911 - val_loss: 0.2210 - val_accuracy: 0.9426
-Epoch 165/1000
-60000/60000 - 6s - loss: 0.0431 - accuracy: 0.9913 - val_loss: 0.2225 - val_accuracy: 0.9421
-Epoch 166/1000
-60000/60000 - 6s - loss: 0.0429 - accuracy: 0.9915 - val_loss: 0.2234 - val_accuracy: 0.9427
-Epoch 167/1000
-60000/60000 - 6s - loss: 0.0426 - accuracy: 0.9914 - val_loss: 0.2221 - val_accuracy: 0.9427
-Epoch 168/1000
-60000/60000 - 6s - loss: 0.0424 - accuracy: 0.9913 - val_loss: 0.2233 - val_accuracy: 0.9430
-Epoch 169/1000
-60000/60000 - 6s - loss: 0.0422 - accuracy: 0.9915 - val_loss: 0.2239 - val_accuracy: 0.9422
-Epoch 170/1000
-60000/60000 - 6s - loss: 0.0419 - accuracy: 0.9917 - val_loss: 0.2250 - val_accuracy: 0.9410
-Epoch 171/1000
-60000/60000 - 6s - loss: 0.0417 - accuracy: 0.9917 - val_loss: 0.2243 - val_accuracy: 0.9422
-[... Epochs 172-753/1000 elided: per-epoch Keras logs in the same format (60000/60000 samples, ~6-7 s per epoch), tracing steady overfitting as training loss falls from 0.0414 to 0.0079 (accuracy 0.9916 -> 0.9995) while validation loss climbs from 0.2259 to 0.4008 (val_accuracy 0.9419 -> 0.9317) ...]
-Epoch 754/1000
-60000/60000 - 6s - loss: 0.0079 - accuracy: 0.9995 - val_loss: 0.4004 - val_accuracy: 0.9315
-Epoch 755/1000
-60000/60000 - 6s - loss: 0.0079 - accuracy: 0.9995 - val_loss: 0.4005 - val_accuracy: 0.9315
-Epoch 756/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9995 - val_loss: 0.4003 - val_accuracy: 0.9313
-Epoch 757/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9995 - val_loss: 0.4017 - val_accuracy: 0.9313
-Epoch 758/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9995 - val_loss: 0.4019 - val_accuracy: 0.9312
-Epoch 759/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9995 - val_loss: 0.4011 - val_accuracy: 0.9313
-Epoch 760/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9995 - val_loss: 0.4019 - val_accuracy: 0.9312
-Epoch 761/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9995 - val_loss: 0.4013 - val_accuracy: 0.9316
-Epoch 762/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9995 - val_loss: 0.4022 - val_accuracy: 0.9314
-Epoch 763/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9995 - val_loss: 0.4014 - val_accuracy: 0.9321
-Epoch 764/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9995 - val_loss: 0.4031 - val_accuracy: 0.9311
-Epoch 765/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9995 - val_loss: 0.4022 - val_accuracy: 0.9315
-Epoch 766/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9995 - val_loss: 0.4029 - val_accuracy: 0.9314
-Epoch 767/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9995 - val_loss: 0.4022 - val_accuracy: 0.9313
-Epoch 768/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9995 - val_loss: 0.4029 - val_accuracy: 0.9313
-Epoch 769/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9995 - val_loss: 0.4025 - val_accuracy: 0.9315
-Epoch 770/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9995 - val_loss: 0.4043 - val_accuracy: 0.9313
-Epoch 771/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9995 - val_loss: 0.4030 - val_accuracy: 0.9318
-Epoch 772/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9995 - val_loss: 0.4037 - val_accuracy: 0.9315
-Epoch 773/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9995 - val_loss: 0.4042 - val_accuracy: 0.9317
-Epoch 774/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9995 - val_loss: 0.4049 - val_accuracy: 0.9311
-Epoch 775/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9995 - val_loss: 0.4039 - val_accuracy: 0.9319
-Epoch 776/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9995 - val_loss: 0.4048 - val_accuracy: 0.9312
-Epoch 777/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9995 - val_loss: 0.4039 - val_accuracy: 0.9316
-Epoch 778/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9995 - val_loss: 0.4046 - val_accuracy: 0.9306
-Epoch 779/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9995 - val_loss: 0.4049 - val_accuracy: 0.9314
-Epoch 780/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9995 - val_loss: 0.4049 - val_accuracy: 0.9308
-Epoch 781/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9995 - val_loss: 0.4055 - val_accuracy: 0.9309
-Epoch 782/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9995 - val_loss: 0.4056 - val_accuracy: 0.9316
-Epoch 783/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9995 - val_loss: 0.4051 - val_accuracy: 0.9311
-Epoch 784/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9995 - val_loss: 0.4061 - val_accuracy: 0.9308
-Epoch 785/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9995 - val_loss: 0.4064 - val_accuracy: 0.9309
-Epoch 786/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9995 - val_loss: 0.4068 - val_accuracy: 0.9314
-Epoch 787/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9995 - val_loss: 0.4073 - val_accuracy: 0.9316
-Epoch 788/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9995 - val_loss: 0.4072 - val_accuracy: 0.9311
-Epoch 789/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9995 - val_loss: 0.4060 - val_accuracy: 0.9311
-Epoch 790/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9995 - val_loss: 0.4063 - val_accuracy: 0.9312
-Epoch 791/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9995 - val_loss: 0.4068 - val_accuracy: 0.9320
-Epoch 792/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9995 - val_loss: 0.4070 - val_accuracy: 0.9313
-Epoch 793/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9995 - val_loss: 0.4072 - val_accuracy: 0.9312
-Epoch 794/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9995 - val_loss: 0.4073 - val_accuracy: 0.9311
-Epoch 795/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9995 - val_loss: 0.4071 - val_accuracy: 0.9312
-Epoch 796/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9995 - val_loss: 0.4079 - val_accuracy: 0.9310
-Epoch 797/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9995 - val_loss: 0.4084 - val_accuracy: 0.9308
-Epoch 798/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9995 - val_loss: 0.4077 - val_accuracy: 0.9311
-Epoch 799/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9995 - val_loss: 0.4074 - val_accuracy: 0.9316
-Epoch 800/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9995 - val_loss: 0.4084 - val_accuracy: 0.9309
-Epoch 801/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9995 - val_loss: 0.4087 - val_accuracy: 0.9309
-Epoch 802/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9995 - val_loss: 0.4091 - val_accuracy: 0.9311
-Epoch 803/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9995 - val_loss: 0.4090 - val_accuracy: 0.9308
-Epoch 804/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9995 - val_loss: 0.4090 - val_accuracy: 0.9307
-Epoch 805/1000
-60000/60000 - 7s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4099 - val_accuracy: 0.9316
-Epoch 806/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4095 - val_accuracy: 0.9311
-Epoch 807/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4097 - val_accuracy: 0.9313
-Epoch 808/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4100 - val_accuracy: 0.9313
-Epoch 809/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4104 - val_accuracy: 0.9308
-Epoch 810/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4103 - val_accuracy: 0.9310
-Epoch 811/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4112 - val_accuracy: 0.9308
-Epoch 812/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4105 - val_accuracy: 0.9313
-Epoch 813/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4107 - val_accuracy: 0.9307
-Epoch 814/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4114 - val_accuracy: 0.9313
-Epoch 815/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9995 - val_loss: 0.4115 - val_accuracy: 0.9308
-Epoch 816/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9995 - val_loss: 0.4113 - val_accuracy: 0.9311
-Epoch 817/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9995 - val_loss: 0.4113 - val_accuracy: 0.9316
-Epoch 818/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9996 - val_loss: 0.4119 - val_accuracy: 0.9311
-Epoch 819/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9995 - val_loss: 0.4127 - val_accuracy: 0.9307
-Epoch 820/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9995 - val_loss: 0.4123 - val_accuracy: 0.9311
-Epoch 821/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9995 - val_loss: 0.4125 - val_accuracy: 0.9310
-Epoch 822/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9996 - val_loss: 0.4122 - val_accuracy: 0.9313
-Epoch 823/1000
-60000/60000 - 7s - loss: 0.0072 - accuracy: 0.9995 - val_loss: 0.4124 - val_accuracy: 0.9310
-Epoch 824/1000
-60000/60000 - 7s - loss: 0.0072 - accuracy: 0.9995 - val_loss: 0.4127 - val_accuracy: 0.9311
-Epoch 825/1000
-60000/60000 - 7s - loss: 0.0072 - accuracy: 0.9995 - val_loss: 0.4126 - val_accuracy: 0.9311
-Epoch 826/1000
-60000/60000 - 7s - loss: 0.0072 - accuracy: 0.9995 - val_loss: 0.4129 - val_accuracy: 0.9310
-Epoch 827/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4124 - val_accuracy: 0.9314
-Epoch 828/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4134 - val_accuracy: 0.9313
-Epoch 829/1000
-60000/60000 - 7s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4137 - val_accuracy: 0.9314
-Epoch 830/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9996 - val_loss: 0.4134 - val_accuracy: 0.9306
-Epoch 831/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4135 - val_accuracy: 0.9313
-Epoch 832/1000
-60000/60000 - 7s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4143 - val_accuracy: 0.9305
-Epoch 833/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4143 - val_accuracy: 0.9311
-Epoch 834/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4146 - val_accuracy: 0.9310
-Epoch 835/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4148 - val_accuracy: 0.9314
-Epoch 836/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4152 - val_accuracy: 0.9310
-Epoch 837/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4146 - val_accuracy: 0.9308
-Epoch 838/1000
-60000/60000 - 7s - loss: 0.0071 - accuracy: 0.9995 - val_loss: 0.4150 - val_accuracy: 0.9311
-Epoch 839/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9995 - val_loss: 0.4150 - val_accuracy: 0.9307
-Epoch 840/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9995 - val_loss: 0.4152 - val_accuracy: 0.9313
-Epoch 841/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9996 - val_loss: 0.4160 - val_accuracy: 0.9313
-Epoch 842/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9996 - val_loss: 0.4158 - val_accuracy: 0.9312
-Epoch 843/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9995 - val_loss: 0.4160 - val_accuracy: 0.9307
-Epoch 844/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9995 - val_loss: 0.4157 - val_accuracy: 0.9313
-Epoch 845/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9995 - val_loss: 0.4166 - val_accuracy: 0.9310
-Epoch 846/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9996 - val_loss: 0.4166 - val_accuracy: 0.9310
-Epoch 847/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9995 - val_loss: 0.4161 - val_accuracy: 0.9314
-Epoch 848/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9995 - val_loss: 0.4169 - val_accuracy: 0.9308
-Epoch 849/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9995 - val_loss: 0.4168 - val_accuracy: 0.9311
-Epoch 850/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9995 - val_loss: 0.4168 - val_accuracy: 0.9306
-Epoch 851/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9995 - val_loss: 0.4171 - val_accuracy: 0.9309
-Epoch 852/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9996 - val_loss: 0.4173 - val_accuracy: 0.9314
-Epoch 853/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9995 - val_loss: 0.4171 - val_accuracy: 0.9313
-Epoch 854/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9995 - val_loss: 0.4177 - val_accuracy: 0.9307
-Epoch 855/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9995 - val_loss: 0.4179 - val_accuracy: 0.9309
-Epoch 856/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9995 - val_loss: 0.4178 - val_accuracy: 0.9310
-Epoch 857/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9996 - val_loss: 0.4179 - val_accuracy: 0.9312
-Epoch 858/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9996 - val_loss: 0.4185 - val_accuracy: 0.9311
-Epoch 859/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9995 - val_loss: 0.4179 - val_accuracy: 0.9309
-Epoch 860/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9996 - val_loss: 0.4185 - val_accuracy: 0.9312
-Epoch 861/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4195 - val_accuracy: 0.9308
-Epoch 862/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4201 - val_accuracy: 0.9312
-Epoch 863/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9995 - val_loss: 0.4184 - val_accuracy: 0.9312
-Epoch 864/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4193 - val_accuracy: 0.9313
-Epoch 865/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4191 - val_accuracy: 0.9310
-Epoch 866/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9995 - val_loss: 0.4196 - val_accuracy: 0.9312
-Epoch 867/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4205 - val_accuracy: 0.9309
-Epoch 868/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9995 - val_loss: 0.4197 - val_accuracy: 0.9312
-Epoch 869/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4203 - val_accuracy: 0.9313
-Epoch 870/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9995 - val_loss: 0.4201 - val_accuracy: 0.9311
-Epoch 871/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4210 - val_accuracy: 0.9315
-Epoch 872/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4199 - val_accuracy: 0.9314
-Epoch 873/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4208 - val_accuracy: 0.9310
-Epoch 874/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4209 - val_accuracy: 0.9310
-Epoch 875/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9996 - val_loss: 0.4218 - val_accuracy: 0.9315
-Epoch 876/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9996 - val_loss: 0.4216 - val_accuracy: 0.9311
-Epoch 877/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9995 - val_loss: 0.4209 - val_accuracy: 0.9308
-Epoch 878/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9996 - val_loss: 0.4218 - val_accuracy: 0.9312
-Epoch 879/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9996 - val_loss: 0.4216 - val_accuracy: 0.9308
-Epoch 880/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9996 - val_loss: 0.4220 - val_accuracy: 0.9310
-Epoch 881/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9996 - val_loss: 0.4219 - val_accuracy: 0.9310
-Epoch 882/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9995 - val_loss: 0.4226 - val_accuracy: 0.9313
-Epoch 883/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9996 - val_loss: 0.4222 - val_accuracy: 0.9313
-Epoch 884/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9996 - val_loss: 0.4224 - val_accuracy: 0.9312
-Epoch 885/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9995 - val_loss: 0.4227 - val_accuracy: 0.9311
-Epoch 886/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9996 - val_loss: 0.4227 - val_accuracy: 0.9309
-Epoch 887/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9996 - val_loss: 0.4234 - val_accuracy: 0.9306
-Epoch 888/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4226 - val_accuracy: 0.9310
-Epoch 889/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4235 - val_accuracy: 0.9312
-Epoch 890/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4229 - val_accuracy: 0.9312
-Epoch 891/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4233 - val_accuracy: 0.9310
-Epoch 892/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4242 - val_accuracy: 0.9307
-Epoch 893/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4234 - val_accuracy: 0.9315
-Epoch 894/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4248 - val_accuracy: 0.9311
-Epoch 895/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4238 - val_accuracy: 0.9314
-Epoch 896/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4246 - val_accuracy: 0.9308
-Epoch 897/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4249 - val_accuracy: 0.9304
-Epoch 898/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4244 - val_accuracy: 0.9310
-Epoch 899/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4253 - val_accuracy: 0.9308
-Epoch 900/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4248 - val_accuracy: 0.9308
-Epoch 901/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9996 - val_loss: 0.4247 - val_accuracy: 0.9307
-Epoch 902/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4252 - val_accuracy: 0.9303
-Epoch 903/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4254 - val_accuracy: 0.9306
-Epoch 904/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4254 - val_accuracy: 0.9312
-Epoch 905/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4259 - val_accuracy: 0.9310
-Epoch 906/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4260 - val_accuracy: 0.9310
-Epoch 907/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4258 - val_accuracy: 0.9307
-Epoch 908/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4266 - val_accuracy: 0.9310
-Epoch 909/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4262 - val_accuracy: 0.9307
-Epoch 910/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4264 - val_accuracy: 0.9310
-Epoch 911/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4268 - val_accuracy: 0.9311
-Epoch 912/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4267 - val_accuracy: 0.9312
-Epoch 913/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4278 - val_accuracy: 0.9309
-Epoch 914/1000
-60000/60000 - 6s - loss: 0.0065 - accuracy: 0.9996 - val_loss: 0.4268 - val_accuracy: 0.9311
-Epoch 915/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4271 - val_accuracy: 0.9314
-Epoch 916/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4272 - val_accuracy: 0.9308
-Epoch 917/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4278 - val_accuracy: 0.9308
-Epoch 918/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4277 - val_accuracy: 0.9310
-Epoch 919/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4282 - val_accuracy: 0.9309
-Epoch 920/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4283 - val_accuracy: 0.9313
-Epoch 921/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4280 - val_accuracy: 0.9310
-Epoch 922/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4284 - val_accuracy: 0.9310
-Epoch 923/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4292 - val_accuracy: 0.9308
-Epoch 924/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4291 - val_accuracy: 0.9308
-Epoch 925/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4291 - val_accuracy: 0.9312
-Epoch 926/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4288 - val_accuracy: 0.9309
-Epoch 927/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4292 - val_accuracy: 0.9309
-Epoch 928/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4291 - val_accuracy: 0.9311
-Epoch 929/1000
-60000/60000 - 6s - loss: 0.0064 - accuracy: 0.9996 - val_loss: 0.4296 - val_accuracy: 0.9312
-Epoch 930/1000
-60000/60000 - 7s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4302 - val_accuracy: 0.9308
-Epoch 931/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4299 - val_accuracy: 0.9309
-Epoch 932/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4298 - val_accuracy: 0.9306
-Epoch 933/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4299 - val_accuracy: 0.9307
-Epoch 934/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4294 - val_accuracy: 0.9312
-Epoch 935/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4305 - val_accuracy: 0.9306
-Epoch 936/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4311 - val_accuracy: 0.9306
-Epoch 937/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4311 - val_accuracy: 0.9306
-Epoch 938/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4312 - val_accuracy: 0.9309
-Epoch 939/1000
-60000/60000 - 7s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4309 - val_accuracy: 0.9310
-Epoch 940/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4313 - val_accuracy: 0.9307
-Epoch 941/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4314 - val_accuracy: 0.9310
-Epoch 942/1000
-60000/60000 - 7s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4311 - val_accuracy: 0.9308
-Epoch 943/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4314 - val_accuracy: 0.9309
-Epoch 944/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4319 - val_accuracy: 0.9310
-Epoch 945/1000
-60000/60000 - 6s - loss: 0.0063 - accuracy: 0.9996 - val_loss: 0.4319 - val_accuracy: 0.9310
-Epoch 946/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4319 - val_accuracy: 0.9311
-Epoch 947/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4327 - val_accuracy: 0.9313
-Epoch 948/1000
-60000/60000 - 7s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4322 - val_accuracy: 0.9308
-Epoch 949/1000
-60000/60000 - 7s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4325 - val_accuracy: 0.9309
-Epoch 950/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4320 - val_accuracy: 0.9310
-Epoch 951/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4327 - val_accuracy: 0.9313
-Epoch 952/1000
-60000/60000 - 7s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4331 - val_accuracy: 0.9310
-Epoch 953/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4325 - val_accuracy: 0.9310
-Epoch 954/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4328 - val_accuracy: 0.9314
-Epoch 955/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4335 - val_accuracy: 0.9307
-Epoch 956/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4337 - val_accuracy: 0.9309
-Epoch 957/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4339 - val_accuracy: 0.9304
-Epoch 958/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4332 - val_accuracy: 0.9307
-Epoch 959/1000
-60000/60000 - 6s - loss: 0.0062 - accuracy: 0.9996 - val_loss: 0.4342 - val_accuracy: 0.9310
-Epoch 960/1000
-60000/60000 - 6s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4340 - val_accuracy: 0.9307
-Epoch 961/1000
-60000/60000 - 6s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4344 - val_accuracy: 0.9302
-Epoch 962/1000
-60000/60000 - 7s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4344 - val_accuracy: 0.9313
-Epoch 963/1000
-60000/60000 - 6s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4347 - val_accuracy: 0.9309
-Epoch 964/1000
-60000/60000 - 6s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4349 - val_accuracy: 0.9311
-Epoch 965/1000
-60000/60000 - 6s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4348 - val_accuracy: 0.9312
-Epoch 966/1000
-60000/60000 - 6s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4356 - val_accuracy: 0.9308
-Epoch 967/1000
-60000/60000 - 6s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4348 - val_accuracy: 0.9311
-Epoch 968/1000
-60000/60000 - 6s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4354 - val_accuracy: 0.9315
-Epoch 969/1000
-60000/60000 - 7s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4359 - val_accuracy: 0.9311
-Epoch 970/1000
-60000/60000 - 7s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4349 - val_accuracy: 0.9308
-Epoch 971/1000
-60000/60000 - 6s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4357 - val_accuracy: 0.9312
-Epoch 972/1000
-60000/60000 - 7s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4362 - val_accuracy: 0.9310
-Epoch 973/1000
-60000/60000 - 6s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4361 - val_accuracy: 0.9312
-Epoch 974/1000
-60000/60000 - 7s - loss: 0.0061 - accuracy: 0.9996 - val_loss: 0.4359 - val_accuracy: 0.9309
-Epoch 975/1000
-60000/60000 - 7s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4367 - val_accuracy: 0.9307
-Epoch 976/1000
-60000/60000 - 7s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4363 - val_accuracy: 0.9317
-Epoch 977/1000
-60000/60000 - 7s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4363 - val_accuracy: 0.9312
-Epoch 978/1000
-60000/60000 - 7s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4368 - val_accuracy: 0.9309
-Epoch 979/1000
-60000/60000 - 6s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4371 - val_accuracy: 0.9307
-Epoch 980/1000
-60000/60000 - 7s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4368 - val_accuracy: 0.9310
-Epoch 981/1000
-60000/60000 - 6s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4376 - val_accuracy: 0.9307
-Epoch 982/1000
-60000/60000 - 6s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4374 - val_accuracy: 0.9310
-Epoch 983/1000
-60000/60000 - 6s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4370 - val_accuracy: 0.9314
-Epoch 984/1000
-60000/60000 - 7s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4373 - val_accuracy: 0.9309
-Epoch 985/1000
-60000/60000 - 6s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4377 - val_accuracy: 0.9306
-Epoch 986/1000
-60000/60000 - 6s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4382 - val_accuracy: 0.9313
-Epoch 987/1000
-60000/60000 - 7s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4383 - val_accuracy: 0.9313
-Epoch 988/1000
-60000/60000 - 6s - loss: 0.0060 - accuracy: 0.9996 - val_loss: 0.4376 - val_accuracy: 0.9309
-Epoch 989/1000
-60000/60000 - 6s - loss: 0.0059 - accuracy: 0.9996 - val_loss: 0.4380 - val_accuracy: 0.9305
-Epoch 990/1000
-60000/60000 - 6s - loss: 0.0059 - accuracy: 0.9996 - val_loss: 0.4380 - val_accuracy: 0.9312
-Epoch 991/1000
-60000/60000 - 6s - loss: 0.0059 - accuracy: 0.9996 - val_loss: 0.4384 - val_accuracy: 0.9313
-Epoch 992/1000
-60000/60000 - 7s - loss: 0.0059 - accuracy: 0.9996 - val_loss: 0.4389 - val_accuracy: 0.9304
-Epoch 993/1000
-60000/60000 - 6s - loss: 0.0059 - accuracy: 0.9996 - val_loss: 0.4381 - val_accuracy: 0.9315
-Epoch 994/1000
-60000/60000 - 7s - loss: 0.0059 - accuracy: 0.9996 - val_loss: 0.4390 - val_accuracy: 0.9310
-Epoch 995/1000
-60000/60000 - 6s - loss: 0.0059 - accuracy: 0.9996 - val_loss: 0.4388 - val_accuracy: 0.9311
-Epoch 996/1000
-60000/60000 - 6s - loss: 0.0059 - accuracy: 0.9996 - val_loss: 0.4392 - val_accuracy: 0.9311
-Epoch 997/1000
-60000/60000 - 7s - loss: 0.0059 - accuracy: 0.9996 - val_loss: 0.4393 - val_accuracy: 0.9309
-Epoch 998/1000
-60000/60000 - 7s - loss: 0.0058 - accuracy: 0.9996 - val_loss: 0.4391 - val_accuracy: 0.9312
-Epoch 999/1000
-60000/60000 - 7s - loss: 0.0058 - accuracy: 0.9996 - val_loss: 0.4398 - val_accuracy: 0.9309
-Epoch 1000/1000
-60000/60000 - 7s - loss: 0.0058 - accuracy: 0.9996 - val_loss: 0.4402 - val_accuracy: 0.9313
-Test loss was 0.1972, test accuracy was 0.9472
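Note: the per-epoch lines in these deleted logs match the output of Keras `model.fit(..., verbose=2)`, where older tf.keras releases report sample counts (`60000/60000`) rather than batch counts. Below is a minimal sketch of a script that would emit logs of this shape; the hidden-layer width, sigmoid activation, SGD optimizer, and batch size are illustrative assumptions (suggested only by the file names), not values read from the deleted files.

# Hypothetical reconstruction of the kind of run behind these logs.
# Layer width, activation, optimizer, and batch size are assumptions.
import tensorflow as tf
from tensorflow.keras import layers

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
x_train = x_train.reshape(60000, 784).astype("float32") / 255.0  # "X_train shape: (60000, 784)"
x_test = x_test.reshape(10000, 784).astype("float32") / 255.0

model = tf.keras.Sequential([
    layers.Dense(100, activation="sigmoid", input_shape=(784,)),  # assumed hidden width
    layers.Dense(10, activation="softmax"),
])
model.compile(optimizer="sgd",
              loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])

# verbose=2 prints one "loss/accuracy/val_loss/val_accuracy" line per epoch,
# matching the log format above.
model.fit(x_train, y_train, epochs=1000, batch_size=128,
          validation_data=(x_test, y_test), verbose=2)

loss, acc = model.evaluate(x_test, y_test, verbose=0)
print(f"Test loss was {loss:.4f}, test accuracy was {acc:.4f}")

The log itself shows training loss still creeping down (0.0087 to 0.0058) while val_loss slowly rises (0.388 to 0.440) over the last ~300 epochs: the usual signature of a fixed 1000-epoch run without early stopping.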
diff --git a/MNIST/nonlinear_withsigmoid_pnweights.txt b/MNIST/nonlinear_withsigmoid_pnweights.txt
deleted file mode 100644
index 73a54ee..0000000
--- a/MNIST/nonlinear_withsigmoid_pnweights.txt
+++ /dev/null
@@ -1,2005 +0,0 @@
-X_train shape: (60000, 784)
-60000 train samples
-10000 test samples
-Train on 60000 samples, validate on 10000 samples
-Epoch 1/1000
-60000/60000 - 9s - loss: 0.5901 - accuracy: 0.8227 - val_loss: 0.3636 - val_accuracy: 0.8920
-Epoch 2/1000
-60000/60000 - 6s - loss: 0.3389 - accuracy: 0.9004 - val_loss: 0.3002 - val_accuracy: 0.9129
-Epoch 3/1000
-60000/60000 - 6s - loss: 0.2826 - accuracy: 0.9175 - val_loss: 0.2706 - val_accuracy: 0.9213
-Epoch 4/1000
-60000/60000 - 6s - loss: 0.2505 - accuracy: 0.9276 - val_loss: 0.2507 - val_accuracy: 0.9272
-Epoch 5/1000
-60000/60000 - 6s - loss: 0.2284 - accuracy: 0.9330 - val_loss: 0.2375 - val_accuracy: 0.9328
-Epoch 6/1000
-60000/60000 - 6s - loss: 0.2117 - accuracy: 0.9392 - val_loss: 0.2283 - val_accuracy: 0.9327
-Epoch 7/1000
-60000/60000 - 6s - loss: 0.1986 - accuracy: 0.9422 - val_loss: 0.2207 - val_accuracy: 0.9369
-Epoch 8/1000
-60000/60000 - 6s - loss: 0.1876 - accuracy: 0.9456 - val_loss: 0.2144 - val_accuracy: 0.9371
-Epoch 9/1000
-60000/60000 - 6s - loss: 0.1786 - accuracy: 0.9487 - val_loss: 0.2112 - val_accuracy: 0.9377
-Epoch 10/1000
-60000/60000 - 6s - loss: 0.1705 - accuracy: 0.9511 - val_loss: 0.2048 - val_accuracy: 0.9399
-Epoch 11/1000
-60000/60000 - 6s - loss: 0.1637 - accuracy: 0.9538 - val_loss: 0.2026 - val_accuracy: 0.9420
-Epoch 12/1000
-60000/60000 - 6s - loss: 0.1578 - accuracy: 0.9554 - val_loss: 0.2030 - val_accuracy: 0.9399
-Epoch 13/1000
-60000/60000 - 6s - loss: 0.1518 - accuracy: 0.9566 - val_loss: 0.1977 - val_accuracy: 0.9409
-Epoch 14/1000
-60000/60000 - 6s - loss: 0.1468 - accuracy: 0.9578 - val_loss: 0.1965 - val_accuracy: 0.9417
-Epoch 15/1000
-60000/60000 - 6s - loss: 0.1424 - accuracy: 0.9596 - val_loss: 0.1965 - val_accuracy: 0.9424
-Epoch 16/1000
-60000/60000 - 6s - loss: 0.1381 - accuracy: 0.9609 - val_loss: 0.1953 - val_accuracy: 0.9416
-Epoch 17/1000
-60000/60000 - 6s - loss: 0.1340 - accuracy: 0.9620 - val_loss: 0.1927 - val_accuracy: 0.9429
-Epoch 18/1000
-60000/60000 - 6s - loss: 0.1305 - accuracy: 0.9632 - val_loss: 0.1912 - val_accuracy: 0.9436
-Epoch 19/1000
-60000/60000 - 6s - loss: 0.1269 - accuracy: 0.9636 - val_loss: 0.1908 - val_accuracy: 0.9447
-Epoch 20/1000
-60000/60000 - 6s - loss: 0.1236 - accuracy: 0.9654 - val_loss: 0.1898 - val_accuracy: 0.9444
-Epoch 21/1000
-60000/60000 - 6s - loss: 0.1207 - accuracy: 0.9658 - val_loss: 0.1908 - val_accuracy: 0.9454
-Epoch 22/1000
-60000/60000 - 6s - loss: 0.1179 - accuracy: 0.9664 - val_loss: 0.1896 - val_accuracy: 0.9452
-Epoch 23/1000
-60000/60000 - 6s - loss: 0.1152 - accuracy: 0.9678 - val_loss: 0.1914 - val_accuracy: 0.9450
-Epoch 24/1000
-60000/60000 - 6s - loss: 0.1125 - accuracy: 0.9684 - val_loss: 0.1908 - val_accuracy: 0.9436
-Epoch 25/1000
-60000/60000 - 6s - loss: 0.1103 - accuracy: 0.9685 - val_loss: 0.1903 - val_accuracy: 0.9462
-Epoch 26/1000
-60000/60000 - 6s - loss: 0.1079 - accuracy: 0.9697 - val_loss: 0.1893 - val_accuracy: 0.9461
-Epoch 27/1000
-60000/60000 - 6s - loss: 0.1053 - accuracy: 0.9710 - val_loss: 0.1942 - val_accuracy: 0.9446
-Epoch 28/1000
-60000/60000 - 6s - loss: 0.1036 - accuracy: 0.9709 - val_loss: 0.1919 - val_accuracy: 0.9436
-Epoch 29/1000
-60000/60000 - 6s - loss: 0.1011 - accuracy: 0.9718 - val_loss: 0.1925 - val_accuracy: 0.9439
-Epoch 30/1000
-60000/60000 - 6s - loss: 0.0998 - accuracy: 0.9722 - val_loss: 0.1942 - val_accuracy: 0.9442
-Epoch 31/1000
-60000/60000 - 6s - loss: 0.0976 - accuracy: 0.9731 - val_loss: 0.1924 - val_accuracy: 0.9453
-Epoch 32/1000
-60000/60000 - 6s - loss: 0.0959 - accuracy: 0.9735 - val_loss: 0.1951 - val_accuracy: 0.9439
-Epoch 33/1000
-60000/60000 - 6s - loss: 0.0942 - accuracy: 0.9742 - val_loss: 0.1923 - val_accuracy: 0.9450
-Epoch 34/1000
-60000/60000 - 6s - loss: 0.0926 - accuracy: 0.9745 - val_loss: 0.1937 - val_accuracy: 0.9453
-Epoch 35/1000
-60000/60000 - 6s - loss: 0.0912 - accuracy: 0.9748 - val_loss: 0.1901 - val_accuracy: 0.9459
-Epoch 36/1000
-60000/60000 - 6s - loss: 0.0896 - accuracy: 0.9755 - val_loss: 0.1929 - val_accuracy: 0.9449
-Epoch 37/1000
-60000/60000 - 6s - loss: 0.0882 - accuracy: 0.9761 - val_loss: 0.1937 - val_accuracy: 0.9457
-Epoch 38/1000
-60000/60000 - 6s - loss: 0.0870 - accuracy: 0.9762 - val_loss: 0.1930 - val_accuracy: 0.9450
-Epoch 39/1000
-60000/60000 - 6s - loss: 0.0856 - accuracy: 0.9766 - val_loss: 0.1956 - val_accuracy: 0.9450
-Epoch 40/1000
-60000/60000 - 6s - loss: 0.0843 - accuracy: 0.9771 - val_loss: 0.1949 - val_accuracy: 0.9446
-Epoch 41/1000
-60000/60000 - 6s - loss: 0.0831 - accuracy: 0.9773 - val_loss: 0.2003 - val_accuracy: 0.9432
-Epoch 42/1000
-60000/60000 - 6s - loss: 0.0818 - accuracy: 0.9783 - val_loss: 0.1943 - val_accuracy: 0.9447
-Epoch 43/1000
-60000/60000 - 6s - loss: 0.0804 - accuracy: 0.9786 - val_loss: 0.1968 - val_accuracy: 0.9443
-Epoch 44/1000
-60000/60000 - 6s - loss: 0.0794 - accuracy: 0.9791 - val_loss: 0.1964 - val_accuracy: 0.9450
-Epoch 45/1000
-60000/60000 - 6s - loss: 0.0782 - accuracy: 0.9790 - val_loss: 0.1993 - val_accuracy: 0.9440
-Epoch 46/1000
-60000/60000 - 6s - loss: 0.0769 - accuracy: 0.9794 - val_loss: 0.2010 - val_accuracy: 0.9434
-Epoch 47/1000
-60000/60000 - 6s - loss: 0.0759 - accuracy: 0.9801 - val_loss: 0.1977 - val_accuracy: 0.9438
-Epoch 48/1000
-60000/60000 - 6s - loss: 0.0751 - accuracy: 0.9801 - val_loss: 0.1995 - val_accuracy: 0.9435
-Epoch 49/1000
-60000/60000 - 6s - loss: 0.0739 - accuracy: 0.9808 - val_loss: 0.2001 - val_accuracy: 0.9433
-Epoch 50/1000
-60000/60000 - 6s - loss: 0.0732 - accuracy: 0.9804 - val_loss: 0.2018 - val_accuracy: 0.9427
-Epoch 51/1000
-60000/60000 - 6s - loss: 0.0718 - accuracy: 0.9815 - val_loss: 0.2047 - val_accuracy: 0.9427
-Epoch 52/1000
-60000/60000 - 6s - loss: 0.0711 - accuracy: 0.9814 - val_loss: 0.2048 - val_accuracy: 0.9422
-Epoch 53/1000
-60000/60000 - 6s - loss: 0.0702 - accuracy: 0.9820 - val_loss: 0.2052 - val_accuracy: 0.9415
-Epoch 54/1000
-60000/60000 - 6s - loss: 0.0693 - accuracy: 0.9817 - val_loss: 0.2054 - val_accuracy: 0.9427
-Epoch 55/1000
-60000/60000 - 6s - loss: 0.0683 - accuracy: 0.9823 - val_loss: 0.2058 - val_accuracy: 0.9430
-Epoch 56/1000
-60000/60000 - 6s - loss: 0.0677 - accuracy: 0.9827 - val_loss: 0.2076 - val_accuracy: 0.9434
-Epoch 57/1000
-60000/60000 - 6s - loss: 0.0667 - accuracy: 0.9827 - val_loss: 0.2078 - val_accuracy: 0.9428
-Epoch 58/1000
-60000/60000 - 6s - loss: 0.0660 - accuracy: 0.9830 - val_loss: 0.2092 - val_accuracy: 0.9415
-Epoch 59/1000
-60000/60000 - 6s - loss: 0.0652 - accuracy: 0.9834 - val_loss: 0.2085 - val_accuracy: 0.9424
-Epoch 60/1000
-60000/60000 - 6s - loss: 0.0645 - accuracy: 0.9837 - val_loss: 0.2099 - val_accuracy: 0.9428
-Epoch 61/1000
-60000/60000 - 6s - loss: 0.0636 - accuracy: 0.9840 - val_loss: 0.2121 - val_accuracy: 0.9420
-Epoch 62/1000
-60000/60000 - 6s - loss: 0.0629 - accuracy: 0.9844 - val_loss: 0.2128 - val_accuracy: 0.9419
-Epoch 63/1000
-60000/60000 - 6s - loss: 0.0618 - accuracy: 0.9847 - val_loss: 0.2100 - val_accuracy: 0.9427
-Epoch 64/1000
-60000/60000 - 6s - loss: 0.0616 - accuracy: 0.9847 - val_loss: 0.2118 - val_accuracy: 0.9422
-Epoch 65/1000
-60000/60000 - 6s - loss: 0.0608 - accuracy: 0.9853 - val_loss: 0.2161 - val_accuracy: 0.9415
-Epoch 66/1000
-60000/60000 - 6s - loss: 0.0602 - accuracy: 0.9850 - val_loss: 0.2135 - val_accuracy: 0.9414
-Epoch 67/1000
-60000/60000 - 6s - loss: 0.0593 - accuracy: 0.9856 - val_loss: 0.2153 - val_accuracy: 0.9411
-Epoch 68/1000
-60000/60000 - 6s - loss: 0.0590 - accuracy: 0.9854 - val_loss: 0.2167 - val_accuracy: 0.9422
-Epoch 69/1000
-60000/60000 - 6s - loss: 0.0584 - accuracy: 0.9855 - val_loss: 0.2169 - val_accuracy: 0.9417
-Epoch 70/1000
-60000/60000 - 6s - loss: 0.0575 - accuracy: 0.9860 - val_loss: 0.2182 - val_accuracy: 0.9415
-Epoch 71/1000
-60000/60000 - 6s - loss: 0.0569 - accuracy: 0.9863 - val_loss: 0.2172 - val_accuracy: 0.9422
-Epoch 72/1000
-60000/60000 - 6s - loss: 0.0561 - accuracy: 0.9871 - val_loss: 0.2200 - val_accuracy: 0.9419
-Epoch 73/1000
-60000/60000 - 6s - loss: 0.0555 - accuracy: 0.9871 - val_loss: 0.2217 - val_accuracy: 0.9412
-Epoch 74/1000
-60000/60000 - 6s - loss: 0.0549 - accuracy: 0.9868 - val_loss: 0.2209 - val_accuracy: 0.9427
-Epoch 75/1000
-60000/60000 - 6s - loss: 0.0547 - accuracy: 0.9873 - val_loss: 0.2214 - val_accuracy: 0.9402
-Epoch 76/1000
-60000/60000 - 6s - loss: 0.0537 - accuracy: 0.9872 - val_loss: 0.2229 - val_accuracy: 0.9405
-Epoch 77/1000
-60000/60000 - 6s - loss: 0.0531 - accuracy: 0.9878 - val_loss: 0.2218 - val_accuracy: 0.9414
-Epoch 78/1000
-60000/60000 - 6s - loss: 0.0527 - accuracy: 0.9877 - val_loss: 0.2253 - val_accuracy: 0.9412
-Epoch 79/1000
-60000/60000 - 6s - loss: 0.0521 - accuracy: 0.9879 - val_loss: 0.2252 - val_accuracy: 0.9404
-Epoch 80/1000
-60000/60000 - 6s - loss: 0.0520 - accuracy: 0.9878 - val_loss: 0.2267 - val_accuracy: 0.9409
-Epoch 81/1000
-60000/60000 - 6s - loss: 0.0513 - accuracy: 0.9880 - val_loss: 0.2256 - val_accuracy: 0.9409
-Epoch 82/1000
-60000/60000 - 6s - loss: 0.0506 - accuracy: 0.9885 - val_loss: 0.2259 - val_accuracy: 0.9414
-Epoch 83/1000
-60000/60000 - 6s - loss: 0.0503 - accuracy: 0.9887 - val_loss: 0.2289 - val_accuracy: 0.9406
-Epoch 84/1000
-60000/60000 - 6s - loss: 0.0498 - accuracy: 0.9891 - val_loss: 0.2266 - val_accuracy: 0.9408
-Epoch 85/1000
-60000/60000 - 6s - loss: 0.0493 - accuracy: 0.9886 - val_loss: 0.2293 - val_accuracy: 0.9410
-Epoch 86/1000
-60000/60000 - 6s - loss: 0.0487 - accuracy: 0.9889 - val_loss: 0.2309 - val_accuracy: 0.9408
-Epoch 87/1000
-60000/60000 - 6s - loss: 0.0483 - accuracy: 0.9889 - val_loss: 0.2366 - val_accuracy: 0.9390
-Epoch 88/1000
-60000/60000 - 6s - loss: 0.0478 - accuracy: 0.9894 - val_loss: 0.2317 - val_accuracy: 0.9399
-Epoch 89/1000
-60000/60000 - 6s - loss: 0.0475 - accuracy: 0.9894 - val_loss: 0.2347 - val_accuracy: 0.9402
-Epoch 90/1000
-60000/60000 - 6s - loss: 0.0471 - accuracy: 0.9898 - val_loss: 0.2342 - val_accuracy: 0.9401
-Epoch 91/1000
-60000/60000 - 6s - loss: 0.0465 - accuracy: 0.9899 - val_loss: 0.2350 - val_accuracy: 0.9405
-Epoch 92/1000
-60000/60000 - 6s - loss: 0.0460 - accuracy: 0.9901 - val_loss: 0.2354 - val_accuracy: 0.9411
-Epoch 93/1000
-60000/60000 - 6s - loss: 0.0456 - accuracy: 0.9901 - val_loss: 0.2370 - val_accuracy: 0.9404
-Epoch 94/1000
-60000/60000 - 6s - loss: 0.0452 - accuracy: 0.9901 - val_loss: 0.2368 - val_accuracy: 0.9392
-Epoch 95/1000
-60000/60000 - 6s - loss: 0.0448 - accuracy: 0.9905 - val_loss: 0.2369 - val_accuracy: 0.9403
-Epoch 96/1000
-60000/60000 - 6s - loss: 0.0444 - accuracy: 0.9906 - val_loss: 0.2380 - val_accuracy: 0.9404
-Epoch 97/1000
-60000/60000 - 6s - loss: 0.0442 - accuracy: 0.9904 - val_loss: 0.2397 - val_accuracy: 0.9402
-Epoch 98/1000
-60000/60000 - 6s - loss: 0.0438 - accuracy: 0.9907 - val_loss: 0.2405 - val_accuracy: 0.9397
-Epoch 99/1000
-60000/60000 - 6s - loss: 0.0434 - accuracy: 0.9907 - val_loss: 0.2444 - val_accuracy: 0.9391
-Epoch 100/1000
-60000/60000 - 6s - loss: 0.0429 - accuracy: 0.9913 - val_loss: 0.2402 - val_accuracy: 0.9394
-Epoch 101/1000
-60000/60000 - 6s - loss: 0.0429 - accuracy: 0.9908 - val_loss: 0.2425 - val_accuracy: 0.9401
-Epoch 102/1000
-60000/60000 - 6s - loss: 0.0421 - accuracy: 0.9915 - val_loss: 0.2423 - val_accuracy: 0.9399
-Epoch 103/1000
-60000/60000 - 6s - loss: 0.0419 - accuracy: 0.9915 - val_loss: 0.2439 - val_accuracy: 0.9389
-Epoch 104/1000
-60000/60000 - 6s - loss: 0.0415 - accuracy: 0.9916 - val_loss: 0.2448 - val_accuracy: 0.9391
-Epoch 105/1000
-60000/60000 - 6s - loss: 0.0415 - accuracy: 0.9915 - val_loss: 0.2441 - val_accuracy: 0.9402
-Epoch 106/1000
-60000/60000 - 6s - loss: 0.0407 - accuracy: 0.9916 - val_loss: 0.2464 - val_accuracy: 0.9396
-Epoch 107/1000
-60000/60000 - 6s - loss: 0.0408 - accuracy: 0.9916 - val_loss: 0.2467 - val_accuracy: 0.9387
-Epoch 108/1000
-60000/60000 - 6s - loss: 0.0402 - accuracy: 0.9919 - val_loss: 0.2480 - val_accuracy: 0.9385
-Epoch 109/1000
-60000/60000 - 6s - loss: 0.0400 - accuracy: 0.9919 - val_loss: 0.2487 - val_accuracy: 0.9388
-Epoch 110/1000
-60000/60000 - 6s - loss: 0.0395 - accuracy: 0.9918 - val_loss: 0.2500 - val_accuracy: 0.9390
-Epoch 111/1000
-60000/60000 - 6s - loss: 0.0394 - accuracy: 0.9923 - val_loss: 0.2514 - val_accuracy: 0.9383
-Epoch 112/1000
-60000/60000 - 6s - loss: 0.0390 - accuracy: 0.9922 - val_loss: 0.2501 - val_accuracy: 0.9381
-Epoch 113/1000
-60000/60000 - 6s - loss: 0.0387 - accuracy: 0.9922 - val_loss: 0.2520 - val_accuracy: 0.9390
-Epoch 114/1000
-60000/60000 - 6s - loss: 0.0384 - accuracy: 0.9926 - val_loss: 0.2533 - val_accuracy: 0.9376
-Epoch 115/1000
-60000/60000 - 6s - loss: 0.0383 - accuracy: 0.9927 - val_loss: 0.2559 - val_accuracy: 0.9384
-Epoch 116/1000
-60000/60000 - 6s - loss: 0.0378 - accuracy: 0.9926 - val_loss: 0.2552 - val_accuracy: 0.9384
-Epoch 117/1000
-60000/60000 - 6s - loss: 0.0374 - accuracy: 0.9928 - val_loss: 0.2533 - val_accuracy: 0.9393
-Epoch 118/1000
-60000/60000 - 6s - loss: 0.0374 - accuracy: 0.9928 - val_loss: 0.2552 - val_accuracy: 0.9388
-Epoch 119/1000
-60000/60000 - 6s - loss: 0.0370 - accuracy: 0.9929 - val_loss: 0.2540 - val_accuracy: 0.9398
-Epoch 120/1000
-60000/60000 - 6s - loss: 0.0368 - accuracy: 0.9932 - val_loss: 0.2558 - val_accuracy: 0.9400
-Epoch 121/1000
-60000/60000 - 6s - loss: 0.0364 - accuracy: 0.9931 - val_loss: 0.2589 - val_accuracy: 0.9387
-Epoch 122/1000
-60000/60000 - 6s - loss: 0.0362 - accuracy: 0.9931 - val_loss: 0.2586 - val_accuracy: 0.9391
-Epoch 123/1000
-60000/60000 - 6s - loss: 0.0360 - accuracy: 0.9933 - val_loss: 0.2587 - val_accuracy: 0.9386
-Epoch 124/1000
-60000/60000 - 6s - loss: 0.0357 - accuracy: 0.9932 - val_loss: 0.2592 - val_accuracy: 0.9389
-Epoch 125/1000
-60000/60000 - 6s - loss: 0.0356 - accuracy: 0.9932 - val_loss: 0.2592 - val_accuracy: 0.9388
-Epoch 126/1000
-60000/60000 - 6s - loss: 0.0353 - accuracy: 0.9935 - val_loss: 0.2628 - val_accuracy: 0.9384
-Epoch 127/1000
-60000/60000 - 6s - loss: 0.0350 - accuracy: 0.9937 - val_loss: 0.2617 - val_accuracy: 0.9394
-Epoch 128/1000
-60000/60000 - 6s - loss: 0.0347 - accuracy: 0.9936 - val_loss: 0.2625 - val_accuracy: 0.9381
-Epoch 129/1000
-60000/60000 - 6s - loss: 0.0346 - accuracy: 0.9935 - val_loss: 0.2637 - val_accuracy: 0.9391
-Epoch 130/1000
-60000/60000 - 6s - loss: 0.0343 - accuracy: 0.9936 - val_loss: 0.2609 - val_accuracy: 0.9389
-Epoch 131/1000
-60000/60000 - 6s - loss: 0.0340 - accuracy: 0.9937 - val_loss: 0.2656 - val_accuracy: 0.9376
-Epoch 132/1000
-60000/60000 - 6s - loss: 0.0336 - accuracy: 0.9941 - val_loss: 0.2646 - val_accuracy: 0.9394
-Epoch 133/1000
-60000/60000 - 6s - loss: 0.0336 - accuracy: 0.9937 - val_loss: 0.2636 - val_accuracy: 0.9388
-Epoch 134/1000
-60000/60000 - 6s - loss: 0.0333 - accuracy: 0.9941 - val_loss: 0.2642 - val_accuracy: 0.9386
-Epoch 135/1000
-60000/60000 - 6s - loss: 0.0331 - accuracy: 0.9943 - val_loss: 0.2670 - val_accuracy: 0.9395
-Epoch 136/1000
-60000/60000 - 6s - loss: 0.0331 - accuracy: 0.9939 - val_loss: 0.2665 - val_accuracy: 0.9373
-Epoch 137/1000
-60000/60000 - 6s - loss: 0.0328 - accuracy: 0.9941 - val_loss: 0.2678 - val_accuracy: 0.9393
-Epoch 138/1000
-60000/60000 - 6s - loss: 0.0326 - accuracy: 0.9941 - val_loss: 0.2718 - val_accuracy: 0.9385
-Epoch 139/1000
-60000/60000 - 6s - loss: 0.0322 - accuracy: 0.9943 - val_loss: 0.2700 - val_accuracy: 0.9382
-Epoch 140/1000
-60000/60000 - 6s - loss: 0.0320 - accuracy: 0.9944 - val_loss: 0.2687 - val_accuracy: 0.9394
-Epoch 141/1000
-60000/60000 - 6s - loss: 0.0318 - accuracy: 0.9946 - val_loss: 0.2698 - val_accuracy: 0.9379
-Epoch 142/1000
-60000/60000 - 6s - loss: 0.0317 - accuracy: 0.9944 - val_loss: 0.2700 - val_accuracy: 0.9381
-Epoch 143/1000
-60000/60000 - 6s - loss: 0.0315 - accuracy: 0.9942 - val_loss: 0.2721 - val_accuracy: 0.9389
-Epoch 144/1000
-60000/60000 - 6s - loss: 0.0312 - accuracy: 0.9947 - val_loss: 0.2725 - val_accuracy: 0.9387
-Epoch 145/1000
-60000/60000 - 6s - loss: 0.0310 - accuracy: 0.9945 - val_loss: 0.2714 - val_accuracy: 0.9388
-Epoch 146/1000
-60000/60000 - 6s - loss: 0.0309 - accuracy: 0.9949 - val_loss: 0.2743 - val_accuracy: 0.9388
-Epoch 147/1000
-60000/60000 - 6s - loss: 0.0306 - accuracy: 0.9947 - val_loss: 0.2725 - val_accuracy: 0.9389
-Epoch 148/1000
-60000/60000 - 6s - loss: 0.0303 - accuracy: 0.9947 - val_loss: 0.2758 - val_accuracy: 0.9376
-Epoch 149/1000
-60000/60000 - 6s - loss: 0.0303 - accuracy: 0.9950 - val_loss: 0.2759 - val_accuracy: 0.9392
-Epoch 150/1000
-60000/60000 - 6s - loss: 0.0301 - accuracy: 0.9949 - val_loss: 0.2765 - val_accuracy: 0.9395
-Epoch 151/1000
-60000/60000 - 6s - loss: 0.0299 - accuracy: 0.9948 - val_loss: 0.2763 - val_accuracy: 0.9384
-Epoch 152/1000
-60000/60000 - 6s - loss: 0.0297 - accuracy: 0.9948 - val_loss: 0.2774 - val_accuracy: 0.9382
-Epoch 153/1000
-60000/60000 - 6s - loss: 0.0295 - accuracy: 0.9949 - val_loss: 0.2785 - val_accuracy: 0.9375
-Epoch 154/1000
-60000/60000 - 6s - loss: 0.0294 - accuracy: 0.9950 - val_loss: 0.2789 - val_accuracy: 0.9378
-Epoch 155/1000
-60000/60000 - 6s - loss: 0.0292 - accuracy: 0.9950 - val_loss: 0.2786 - val_accuracy: 0.9382
-Epoch 156/1000
-60000/60000 - 6s - loss: 0.0291 - accuracy: 0.9952 - val_loss: 0.2796 - val_accuracy: 0.9368
-Epoch 157/1000
-60000/60000 - 6s - loss: 0.0288 - accuracy: 0.9953 - val_loss: 0.2806 - val_accuracy: 0.9382
-Epoch 158/1000
-60000/60000 - 6s - loss: 0.0287 - accuracy: 0.9952 - val_loss: 0.2800 - val_accuracy: 0.9384
-Epoch 159/1000
-60000/60000 - 6s - loss: 0.0285 - accuracy: 0.9954 - val_loss: 0.2808 - val_accuracy: 0.9370
-Epoch 160/1000
-60000/60000 - 6s - loss: 0.0283 - accuracy: 0.9954 - val_loss: 0.2815 - val_accuracy: 0.9375
-Epoch 161/1000
-60000/60000 - 6s - loss: 0.0281 - accuracy: 0.9953 - val_loss: 0.2843 - val_accuracy: 0.9386
-Epoch 162/1000
-60000/60000 - 6s - loss: 0.0281 - accuracy: 0.9955 - val_loss: 0.2831 - val_accuracy: 0.9380
-Epoch 163/1000
-60000/60000 - 6s - loss: 0.0278 - accuracy: 0.9958 - val_loss: 0.2829 - val_accuracy: 0.9382
-Epoch 164/1000
-60000/60000 - 6s - loss: 0.0276 - accuracy: 0.9957 - val_loss: 0.2836 - val_accuracy: 0.9379
-Epoch 165/1000
-60000/60000 - 6s - loss: 0.0276 - accuracy: 0.9955 - val_loss: 0.2861 - val_accuracy: 0.9364
-Epoch 166/1000
-60000/60000 - 6s - loss: 0.0274 - accuracy: 0.9957 - val_loss: 0.2867 - val_accuracy: 0.9369
-Epoch 167/1000
-60000/60000 - 6s - loss: 0.0272 - accuracy: 0.9957 - val_loss: 0.2851 - val_accuracy: 0.9369
-Epoch 168/1000
-60000/60000 - 6s - loss: 0.0271 - accuracy: 0.9959 - val_loss: 0.2860 - val_accuracy: 0.9372
-Epoch 169/1000
-60000/60000 - 6s - loss: 0.0269 - accuracy: 0.9959 - val_loss: 0.2887 - val_accuracy: 0.9380
-Epoch 170/1000
-60000/60000 - 6s - loss: 0.0269 - accuracy: 0.9959 - val_loss: 0.2884 - val_accuracy: 0.9380
-Epoch 171/1000
-60000/60000 - 6s - loss: 0.0266 - accuracy: 0.9958 - val_loss: 0.2874 - val_accuracy: 0.9376
-Epoch 172/1000
-60000/60000 - 6s - loss: 0.0265 - accuracy: 0.9959 - val_loss: 0.2881 - val_accuracy: 0.9372
-Epoch 173/1000
-60000/60000 - 6s - loss: 0.0263 - accuracy: 0.9961 - val_loss: 0.2893 - val_accuracy: 0.9372
-Epoch 174/1000
-60000/60000 - 6s - loss: 0.0262 - accuracy: 0.9959 - val_loss: 0.2872 - val_accuracy: 0.9378
-Epoch 175/1000
-60000/60000 - 6s - loss: 0.0261 - accuracy: 0.9960 - val_loss: 0.2893 - val_accuracy: 0.9379
-Epoch 176/1000
-60000/60000 - 6s - loss: 0.0259 - accuracy: 0.9959 - val_loss: 0.2905 - val_accuracy: 0.9381
-Epoch 177/1000
-60000/60000 - 6s - loss: 0.0259 - accuracy: 0.9961 - val_loss: 0.2909 - val_accuracy: 0.9381
-Epoch 178/1000
-60000/60000 - 6s - loss: 0.0257 - accuracy: 0.9961 - val_loss: 0.2899 - val_accuracy: 0.9371
-Epoch 179/1000
-60000/60000 - 6s - loss: 0.0255 - accuracy: 0.9962 - val_loss: 0.2916 - val_accuracy: 0.9373
-Epoch 180/1000
-60000/60000 - 6s - loss: 0.0255 - accuracy: 0.9962 - val_loss: 0.2919 - val_accuracy: 0.9373
-Epoch 181/1000
-60000/60000 - 6s - loss: 0.0253 - accuracy: 0.9962 - val_loss: 0.2930 - val_accuracy: 0.9383
-Epoch 182/1000
-60000/60000 - 6s - loss: 0.0251 - accuracy: 0.9964 - val_loss: 0.2937 - val_accuracy: 0.9374
-Epoch 183/1000
-60000/60000 - 6s - loss: 0.0250 - accuracy: 0.9962 - val_loss: 0.2930 - val_accuracy: 0.9382
-Epoch 184/1000
-60000/60000 - 6s - loss: 0.0249 - accuracy: 0.9963 - val_loss: 0.2949 - val_accuracy: 0.9369
-Epoch 185/1000
-60000/60000 - 6s - loss: 0.0247 - accuracy: 0.9962 - val_loss: 0.2941 - val_accuracy: 0.9373
-Epoch 186/1000
-60000/60000 - 6s - loss: 0.0246 - accuracy: 0.9965 - val_loss: 0.2963 - val_accuracy: 0.9370
-Epoch 187/1000
-60000/60000 - 6s - loss: 0.0245 - accuracy: 0.9965 - val_loss: 0.2955 - val_accuracy: 0.9377
-Epoch 188/1000
-60000/60000 - 6s - loss: 0.0245 - accuracy: 0.9962 - val_loss: 0.2960 - val_accuracy: 0.9377
-Epoch 189/1000
-60000/60000 - 6s - loss: 0.0243 - accuracy: 0.9968 - val_loss: 0.2968 - val_accuracy: 0.9371
-Epoch 190/1000
-60000/60000 - 6s - loss: 0.0242 - accuracy: 0.9965 - val_loss: 0.2967 - val_accuracy: 0.9368
-Epoch 191/1000
-60000/60000 - 6s - loss: 0.0240 - accuracy: 0.9966 - val_loss: 0.2998 - val_accuracy: 0.9374
-Epoch 192/1000
-60000/60000 - 6s - loss: 0.0239 - accuracy: 0.9966 - val_loss: 0.2985 - val_accuracy: 0.9381
-Epoch 193/1000
-60000/60000 - 6s - loss: 0.0238 - accuracy: 0.9965 - val_loss: 0.2977 - val_accuracy: 0.9389
-Epoch 194/1000
-60000/60000 - 6s - loss: 0.0237 - accuracy: 0.9968 - val_loss: 0.2991 - val_accuracy: 0.9378
-Epoch 195/1000
-60000/60000 - 6s - loss: 0.0235 - accuracy: 0.9967 - val_loss: 0.3000 - val_accuracy: 0.9370
-Epoch 196/1000
-60000/60000 - 6s - loss: 0.0234 - accuracy: 0.9967 - val_loss: 0.2990 - val_accuracy: 0.9389
-Epoch 197/1000
-60000/60000 - 6s - loss: 0.0234 - accuracy: 0.9968 - val_loss: 0.3009 - val_accuracy: 0.9383
-Epoch 198/1000
-60000/60000 - 6s - loss: 0.0234 - accuracy: 0.9968 - val_loss: 0.3015 - val_accuracy: 0.9382
-Epoch 199/1000
-60000/60000 - 6s - loss: 0.0232 - accuracy: 0.9968 - val_loss: 0.3029 - val_accuracy: 0.9370
-Epoch 200/1000
-60000/60000 - 6s - loss: 0.0231 - accuracy: 0.9969 - val_loss: 0.3033 - val_accuracy: 0.9374
-Epoch 201/1000
-60000/60000 - 6s - loss: 0.0229 - accuracy: 0.9968 - val_loss: 0.3019 - val_accuracy: 0.9365
-Epoch 202/1000
-60000/60000 - 6s - loss: 0.0228 - accuracy: 0.9969 - val_loss: 0.3023 - val_accuracy: 0.9374
-Epoch 203/1000
-60000/60000 - 6s - loss: 0.0226 - accuracy: 0.9969 - val_loss: 0.3045 - val_accuracy: 0.9370
-Epoch 204/1000
-60000/60000 - 6s - loss: 0.0227 - accuracy: 0.9968 - val_loss: 0.3046 - val_accuracy: 0.9372
-Epoch 205/1000
-60000/60000 - 6s - loss: 0.0225 - accuracy: 0.9970 - val_loss: 0.3041 - val_accuracy: 0.9375
-Epoch 206/1000
-60000/60000 - 6s - loss: 0.0224 - accuracy: 0.9968 - val_loss: 0.3038 - val_accuracy: 0.9386
-Epoch 207/1000
-60000/60000 - 6s - loss: 0.0223 - accuracy: 0.9969 - val_loss: 0.3059 - val_accuracy: 0.9369
-Epoch 208/1000
-60000/60000 - 6s - loss: 0.0221 - accuracy: 0.9970 - val_loss: 0.3057 - val_accuracy: 0.9375
-Epoch 209/1000
-60000/60000 - 6s - loss: 0.0220 - accuracy: 0.9970 - val_loss: 0.3062 - val_accuracy: 0.9366
-Epoch 210/1000
-60000/60000 - 6s - loss: 0.0220 - accuracy: 0.9969 - val_loss: 0.3069 - val_accuracy: 0.9378
-Epoch 211/1000
-60000/60000 - 6s - loss: 0.0219 - accuracy: 0.9970 - val_loss: 0.3075 - val_accuracy: 0.9378
-Epoch 212/1000
-60000/60000 - 6s - loss: 0.0219 - accuracy: 0.9970 - val_loss: 0.3071 - val_accuracy: 0.9378
-Epoch 213/1000
-60000/60000 - 6s - loss: 0.0217 - accuracy: 0.9972 - val_loss: 0.3071 - val_accuracy: 0.9364
-Epoch 214/1000
-60000/60000 - 6s - loss: 0.0216 - accuracy: 0.9971 - val_loss: 0.3077 - val_accuracy: 0.9366
-Epoch 215/1000
-60000/60000 - 6s - loss: 0.0215 - accuracy: 0.9971 - val_loss: 0.3081 - val_accuracy: 0.9372
-Epoch 216/1000
-60000/60000 - 6s - loss: 0.0214 - accuracy: 0.9973 - val_loss: 0.3118 - val_accuracy: 0.9361
-Epoch 217/1000
-60000/60000 - 6s - loss: 0.0212 - accuracy: 0.9972 - val_loss: 0.3095 - val_accuracy: 0.9371
-Epoch 218/1000
-60000/60000 - 6s - loss: 0.0212 - accuracy: 0.9973 - val_loss: 0.3098 - val_accuracy: 0.9372
-Epoch 219/1000
-60000/60000 - 6s - loss: 0.0212 - accuracy: 0.9972 - val_loss: 0.3096 - val_accuracy: 0.9385
-Epoch 220/1000
-60000/60000 - 6s - loss: 0.0210 - accuracy: 0.9972 - val_loss: 0.3107 - val_accuracy: 0.9366
-Epoch 221/1000
-60000/60000 - 6s - loss: 0.0209 - accuracy: 0.9973 - val_loss: 0.3112 - val_accuracy: 0.9369
-Epoch 222/1000
-60000/60000 - 6s - loss: 0.0209 - accuracy: 0.9972 - val_loss: 0.3111 - val_accuracy: 0.9371
-Epoch 223/1000
-60000/60000 - 6s - loss: 0.0208 - accuracy: 0.9973 - val_loss: 0.3125 - val_accuracy: 0.9372
-Epoch 224/1000
-60000/60000 - 6s - loss: 0.0207 - accuracy: 0.9973 - val_loss: 0.3126 - val_accuracy: 0.9367
-Epoch 225/1000
-60000/60000 - 6s - loss: 0.0206 - accuracy: 0.9974 - val_loss: 0.3122 - val_accuracy: 0.9376
-Epoch 226/1000
-60000/60000 - 6s - loss: 0.0205 - accuracy: 0.9974 - val_loss: 0.3136 - val_accuracy: 0.9370
-Epoch 227/1000
-60000/60000 - 6s - loss: 0.0205 - accuracy: 0.9973 - val_loss: 0.3138 - val_accuracy: 0.9368
-Epoch 228/1000
-60000/60000 - 6s - loss: 0.0203 - accuracy: 0.9974 - val_loss: 0.3150 - val_accuracy: 0.9362
-Epoch 229/1000
-60000/60000 - 6s - loss: 0.0203 - accuracy: 0.9975 - val_loss: 0.3160 - val_accuracy: 0.9368
-Epoch 230/1000
-60000/60000 - 6s - loss: 0.0202 - accuracy: 0.9975 - val_loss: 0.3138 - val_accuracy: 0.9372
-Epoch 231/1000
-60000/60000 - 6s - loss: 0.0201 - accuracy: 0.9975 - val_loss: 0.3140 - val_accuracy: 0.9366
-Epoch 232/1000
-60000/60000 - 6s - loss: 0.0201 - accuracy: 0.9973 - val_loss: 0.3156 - val_accuracy: 0.9371
-Epoch 233/1000
-60000/60000 - 6s - loss: 0.0199 - accuracy: 0.9976 - val_loss: 0.3159 - val_accuracy: 0.9370
-Epoch 234/1000
-60000/60000 - 6s - loss: 0.0198 - accuracy: 0.9975 - val_loss: 0.3163 - val_accuracy: 0.9363
-Epoch 235/1000
-60000/60000 - 6s - loss: 0.0198 - accuracy: 0.9975 - val_loss: 0.3168 - val_accuracy: 0.9366
-Epoch 236/1000
-60000/60000 - 6s - loss: 0.0197 - accuracy: 0.9975 - val_loss: 0.3161 - val_accuracy: 0.9370
-Epoch 237/1000
-60000/60000 - 6s - loss: 0.0196 - accuracy: 0.9975 - val_loss: 0.3183 - val_accuracy: 0.9366
-Epoch 238/1000
-60000/60000 - 6s - loss: 0.0196 - accuracy: 0.9975 - val_loss: 0.3179 - val_accuracy: 0.9361
-Epoch 239/1000
-60000/60000 - 6s - loss: 0.0195 - accuracy: 0.9976 - val_loss: 0.3196 - val_accuracy: 0.9359
-Epoch 240/1000
-60000/60000 - 6s - loss: 0.0193 - accuracy: 0.9976 - val_loss: 0.3190 - val_accuracy: 0.9362
-Epoch 241/1000
-60000/60000 - 6s - loss: 0.0194 - accuracy: 0.9976 - val_loss: 0.3193 - val_accuracy: 0.9366
-Epoch 242/1000
-60000/60000 - 6s - loss: 0.0192 - accuracy: 0.9976 - val_loss: 0.3202 - val_accuracy: 0.9363
-Epoch 243/1000
-60000/60000 - 6s - loss: 0.0191 - accuracy: 0.9976 - val_loss: 0.3196 - val_accuracy: 0.9360
-Epoch 244/1000
-60000/60000 - 6s - loss: 0.0191 - accuracy: 0.9977 - val_loss: 0.3217 - val_accuracy: 0.9363
-Epoch 245/1000
-60000/60000 - 6s - loss: 0.0190 - accuracy: 0.9977 - val_loss: 0.3217 - val_accuracy: 0.9359
-Epoch 246/1000
-60000/60000 - 6s - loss: 0.0190 - accuracy: 0.9976 - val_loss: 0.3218 - val_accuracy: 0.9365
-Epoch 247/1000
-60000/60000 - 6s - loss: 0.0189 - accuracy: 0.9977 - val_loss: 0.3210 - val_accuracy: 0.9362
-Epoch 248/1000
-60000/60000 - 6s - loss: 0.0188 - accuracy: 0.9977 - val_loss: 0.3223 - val_accuracy: 0.9357
-Epoch 249/1000
-60000/60000 - 6s - loss: 0.0187 - accuracy: 0.9977 - val_loss: 0.3233 - val_accuracy: 0.9360
-Epoch 250/1000
-60000/60000 - 6s - loss: 0.0186 - accuracy: 0.9977 - val_loss: 0.3227 - val_accuracy: 0.9354
-Epoch 251/1000
-60000/60000 - 6s - loss: 0.0185 - accuracy: 0.9978 - val_loss: 0.3238 - val_accuracy: 0.9359
-Epoch 252/1000
-60000/60000 - 6s - loss: 0.0185 - accuracy: 0.9978 - val_loss: 0.3231 - val_accuracy: 0.9365
-Epoch 253/1000
-60000/60000 - 6s - loss: 0.0185 - accuracy: 0.9977 - val_loss: 0.3225 - val_accuracy: 0.9354
-Epoch 254/1000
-60000/60000 - 6s - loss: 0.0184 - accuracy: 0.9977 - val_loss: 0.3249 - val_accuracy: 0.9359
-Epoch 255/1000
-60000/60000 - 6s - loss: 0.0183 - accuracy: 0.9978 - val_loss: 0.3256 - val_accuracy: 0.9362
-Epoch 256/1000
-60000/60000 - 6s - loss: 0.0182 - accuracy: 0.9977 - val_loss: 0.3257 - val_accuracy: 0.9358
-Epoch 257/1000
-60000/60000 - 6s - loss: 0.0181 - accuracy: 0.9979 - val_loss: 0.3261 - val_accuracy: 0.9351
-Epoch 258/1000
-60000/60000 - 6s - loss: 0.0181 - accuracy: 0.9977 - val_loss: 0.3256 - val_accuracy: 0.9359
-Epoch 259/1000
-60000/60000 - 6s - loss: 0.0180 - accuracy: 0.9979 - val_loss: 0.3255 - val_accuracy: 0.9360
-Epoch 260/1000
-60000/60000 - 6s - loss: 0.0180 - accuracy: 0.9979 - val_loss: 0.3263 - val_accuracy: 0.9357
-Epoch 261/1000
-60000/60000 - 6s - loss: 0.0179 - accuracy: 0.9977 - val_loss: 0.3272 - val_accuracy: 0.9357
-Epoch 262/1000
-60000/60000 - 6s - loss: 0.0178 - accuracy: 0.9980 - val_loss: 0.3269 - val_accuracy: 0.9356
-Epoch 263/1000
-60000/60000 - 6s - loss: 0.0177 - accuracy: 0.9980 - val_loss: 0.3272 - val_accuracy: 0.9361
-Epoch 264/1000
-60000/60000 - 6s - loss: 0.0177 - accuracy: 0.9979 - val_loss: 0.3276 - val_accuracy: 0.9352
-Epoch 265/1000
-60000/60000 - 6s - loss: 0.0176 - accuracy: 0.9979 - val_loss: 0.3275 - val_accuracy: 0.9354
-Epoch 266/1000
-60000/60000 - 6s - loss: 0.0176 - accuracy: 0.9978 - val_loss: 0.3288 - val_accuracy: 0.9363
-Epoch 267/1000
-60000/60000 - 6s - loss: 0.0176 - accuracy: 0.9978 - val_loss: 0.3300 - val_accuracy: 0.9349
-Epoch 268/1000
-60000/60000 - 6s - loss: 0.0174 - accuracy: 0.9980 - val_loss: 0.3294 - val_accuracy: 0.9356
-Epoch 269/1000
-60000/60000 - 6s - loss: 0.0174 - accuracy: 0.9980 - val_loss: 0.3292 - val_accuracy: 0.9354
-Epoch 270/1000
-60000/60000 - 6s - loss: 0.0173 - accuracy: 0.9979 - val_loss: 0.3301 - val_accuracy: 0.9354
-Epoch 271/1000
-60000/60000 - 6s - loss: 0.0173 - accuracy: 0.9980 - val_loss: 0.3306 - val_accuracy: 0.9344
-Epoch 272/1000
-60000/60000 - 6s - loss: 0.0172 - accuracy: 0.9980 - val_loss: 0.3324 - val_accuracy: 0.9344
-Epoch 273/1000
-60000/60000 - 6s - loss: 0.0171 - accuracy: 0.9980 - val_loss: 0.3321 - val_accuracy: 0.9348
-Epoch 274/1000
-60000/60000 - 6s - loss: 0.0170 - accuracy: 0.9979 - val_loss: 0.3321 - val_accuracy: 0.9346
-Epoch 275/1000
-60000/60000 - 6s - loss: 0.0171 - accuracy: 0.9980 - val_loss: 0.3309 - val_accuracy: 0.9354
-Epoch 276/1000
-60000/60000 - 6s - loss: 0.0169 - accuracy: 0.9980 - val_loss: 0.3308 - val_accuracy: 0.9352
-Epoch 277/1000
-60000/60000 - 6s - loss: 0.0168 - accuracy: 0.9980 - val_loss: 0.3310 - val_accuracy: 0.9351
-Epoch 278/1000
-60000/60000 - 6s - loss: 0.0168 - accuracy: 0.9980 - val_loss: 0.3324 - val_accuracy: 0.9349
-Epoch 279/1000
-60000/60000 - 6s - loss: 0.0167 - accuracy: 0.9981 - val_loss: 0.3346 - val_accuracy: 0.9343
-Epoch 280/1000
-60000/60000 - 6s - loss: 0.0167 - accuracy: 0.9981 - val_loss: 0.3331 - val_accuracy: 0.9349
-Epoch 281/1000
-60000/60000 - 6s - loss: 0.0166 - accuracy: 0.9980 - val_loss: 0.3339 - val_accuracy: 0.9344
-Epoch 282/1000
-60000/60000 - 6s - loss: 0.0166 - accuracy: 0.9981 - val_loss: 0.3351 - val_accuracy: 0.9347
-Epoch 283/1000
-60000/60000 - 6s - loss: 0.0166 - accuracy: 0.9981 - val_loss: 0.3362 - val_accuracy: 0.9348
-Epoch 284/1000
-60000/60000 - 6s - loss: 0.0165 - accuracy: 0.9981 - val_loss: 0.3354 - val_accuracy: 0.9338
-Epoch 285/1000
-60000/60000 - 6s - loss: 0.0164 - accuracy: 0.9981 - val_loss: 0.3350 - val_accuracy: 0.9356
-Epoch 286/1000
-60000/60000 - 6s - loss: 0.0164 - accuracy: 0.9981 - val_loss: 0.3355 - val_accuracy: 0.9349
-Epoch 287/1000
-60000/60000 - 6s - loss: 0.0163 - accuracy: 0.9981 - val_loss: 0.3356 - val_accuracy: 0.9346
-Epoch 288/1000
-60000/60000 - 6s - loss: 0.0163 - accuracy: 0.9981 - val_loss: 0.3365 - val_accuracy: 0.9339
-Epoch 289/1000
-60000/60000 - 6s - loss: 0.0162 - accuracy: 0.9981 - val_loss: 0.3357 - val_accuracy: 0.9351
-Epoch 290/1000
-60000/60000 - 6s - loss: 0.0161 - accuracy: 0.9981 - val_loss: 0.3377 - val_accuracy: 0.9348
-Epoch 291/1000
-60000/60000 - 6s - loss: 0.0161 - accuracy: 0.9982 - val_loss: 0.3377 - val_accuracy: 0.9345
-Epoch 292/1000
-60000/60000 - 6s - loss: 0.0160 - accuracy: 0.9981 - val_loss: 0.3370 - val_accuracy: 0.9350
-Epoch 293/1000
-60000/60000 - 6s - loss: 0.0160 - accuracy: 0.9983 - val_loss: 0.3380 - val_accuracy: 0.9340
-Epoch 294/1000
-60000/60000 - 6s - loss: 0.0160 - accuracy: 0.9981 - val_loss: 0.3387 - val_accuracy: 0.9345
-Epoch 295/1000
-60000/60000 - 6s - loss: 0.0159 - accuracy: 0.9982 - val_loss: 0.3378 - val_accuracy: 0.9343
-Epoch 296/1000
-60000/60000 - 6s - loss: 0.0158 - accuracy: 0.9982 - val_loss: 0.3379 - val_accuracy: 0.9348
-Epoch 297/1000
-60000/60000 - 6s - loss: 0.0157 - accuracy: 0.9983 - val_loss: 0.3384 - val_accuracy: 0.9342
-Epoch 298/1000
-60000/60000 - 6s - loss: 0.0157 - accuracy: 0.9983 - val_loss: 0.3389 - val_accuracy: 0.9340
-Epoch 299/1000
-60000/60000 - 6s - loss: 0.0157 - accuracy: 0.9983 - val_loss: 0.3400 - val_accuracy: 0.9334
-Epoch 300/1000
-60000/60000 - 6s - loss: 0.0156 - accuracy: 0.9983 - val_loss: 0.3401 - val_accuracy: 0.9339
-Epoch 301/1000
-60000/60000 - 6s - loss: 0.0156 - accuracy: 0.9984 - val_loss: 0.3393 - val_accuracy: 0.9344
-Epoch 302/1000
-60000/60000 - 6s - loss: 0.0155 - accuracy: 0.9984 - val_loss: 0.3403 - val_accuracy: 0.9345
-Epoch 303/1000
-60000/60000 - 6s - loss: 0.0154 - accuracy: 0.9982 - val_loss: 0.3408 - val_accuracy: 0.9343
-Epoch 304/1000
-60000/60000 - 6s - loss: 0.0154 - accuracy: 0.9983 - val_loss: 0.3415 - val_accuracy: 0.9347
-Epoch 305/1000
-60000/60000 - 6s - loss: 0.0154 - accuracy: 0.9983 - val_loss: 0.3417 - val_accuracy: 0.9340
-Epoch 306/1000
-60000/60000 - 6s - loss: 0.0153 - accuracy: 0.9983 - val_loss: 0.3407 - val_accuracy: 0.9341
-Epoch 307/1000
-60000/60000 - 6s - loss: 0.0153 - accuracy: 0.9984 - val_loss: 0.3414 - val_accuracy: 0.9348
-Epoch 308/1000
-60000/60000 - 6s - loss: 0.0152 - accuracy: 0.9984 - val_loss: 0.3433 - val_accuracy: 0.9341
-Epoch 309/1000
-60000/60000 - 6s - loss: 0.0152 - accuracy: 0.9984 - val_loss: 0.3423 - val_accuracy: 0.9339
-Epoch 310/1000
-60000/60000 - 6s - loss: 0.0152 - accuracy: 0.9984 - val_loss: 0.3435 - val_accuracy: 0.9344
-Epoch 311/1000
-60000/60000 - 6s - loss: 0.0151 - accuracy: 0.9984 - val_loss: 0.3447 - val_accuracy: 0.9341
-Epoch 312/1000
-60000/60000 - 6s - loss: 0.0150 - accuracy: 0.9984 - val_loss: 0.3436 - val_accuracy: 0.9346
-Epoch 313/1000
-60000/60000 - 6s - loss: 0.0150 - accuracy: 0.9984 - val_loss: 0.3441 - val_accuracy: 0.9339
-Epoch 314/1000
-60000/60000 - 6s - loss: 0.0150 - accuracy: 0.9983 - val_loss: 0.3444 - val_accuracy: 0.9341
-Epoch 315/1000
-60000/60000 - 6s - loss: 0.0149 - accuracy: 0.9983 - val_loss: 0.3442 - val_accuracy: 0.9347
-Epoch 316/1000
-60000/60000 - 6s - loss: 0.0149 - accuracy: 0.9984 - val_loss: 0.3450 - val_accuracy: 0.9336
-Epoch 317/1000
-60000/60000 - 6s - loss: 0.0147 - accuracy: 0.9984 - val_loss: 0.3455 - val_accuracy: 0.9343
-Epoch 318/1000
-60000/60000 - 6s - loss: 0.0148 - accuracy: 0.9984 - val_loss: 0.3459 - val_accuracy: 0.9341
-Epoch 319/1000
-60000/60000 - 6s - loss: 0.0147 - accuracy: 0.9985 - val_loss: 0.3447 - val_accuracy: 0.9341
-Epoch 320/1000
-60000/60000 - 6s - loss: 0.0146 - accuracy: 0.9985 - val_loss: 0.3459 - val_accuracy: 0.9345
-Epoch 321/1000
-60000/60000 - 6s - loss: 0.0146 - accuracy: 0.9984 - val_loss: 0.3456 - val_accuracy: 0.9344
-Epoch 322/1000
-60000/60000 - 6s - loss: 0.0146 - accuracy: 0.9984 - val_loss: 0.3456 - val_accuracy: 0.9350
-Epoch 323/1000
-60000/60000 - 6s - loss: 0.0145 - accuracy: 0.9985 - val_loss: 0.3476 - val_accuracy: 0.9339
-Epoch 324/1000
-60000/60000 - 6s - loss: 0.0145 - accuracy: 0.9985 - val_loss: 0.3475 - val_accuracy: 0.9333
-Epoch 325/1000
-60000/60000 - 6s - loss: 0.0144 - accuracy: 0.9984 - val_loss: 0.3477 - val_accuracy: 0.9339
-Epoch 326/1000
-60000/60000 - 6s - loss: 0.0144 - accuracy: 0.9985 - val_loss: 0.3483 - val_accuracy: 0.9334
-Epoch 327/1000
-60000/60000 - 6s - loss: 0.0144 - accuracy: 0.9985 - val_loss: 0.3471 - val_accuracy: 0.9343
-Epoch 328/1000
-60000/60000 - 6s - loss: 0.0143 - accuracy: 0.9985 - val_loss: 0.3484 - val_accuracy: 0.9340
-Epoch 329/1000
-60000/60000 - 6s - loss: 0.0143 - accuracy: 0.9986 - val_loss: 0.3491 - val_accuracy: 0.9329
-Epoch 330/1000
-60000/60000 - 6s - loss: 0.0142 - accuracy: 0.9984 - val_loss: 0.3490 - val_accuracy: 0.9336
-Epoch 331/1000
-60000/60000 - 6s - loss: 0.0142 - accuracy: 0.9985 - val_loss: 0.3498 - val_accuracy: 0.9337
-Epoch 332/1000
-60000/60000 - 6s - loss: 0.0141 - accuracy: 0.9985 - val_loss: 0.3487 - val_accuracy: 0.9338
-Epoch 333/1000
-60000/60000 - 6s - loss: 0.0141 - accuracy: 0.9985 - val_loss: 0.3491 - val_accuracy: 0.9340
-Epoch 334/1000
-60000/60000 - 6s - loss: 0.0140 - accuracy: 0.9986 - val_loss: 0.3501 - val_accuracy: 0.9333
-Epoch 335/1000
-60000/60000 - 6s - loss: 0.0140 - accuracy: 0.9985 - val_loss: 0.3506 - val_accuracy: 0.9334
-Epoch 336/1000
-60000/60000 - 6s - loss: 0.0139 - accuracy: 0.9986 - val_loss: 0.3503 - val_accuracy: 0.9342
-Epoch 337/1000
-60000/60000 - 6s - loss: 0.0139 - accuracy: 0.9986 - val_loss: 0.3513 - val_accuracy: 0.9326
-Epoch 338/1000
-60000/60000 - 6s - loss: 0.0139 - accuracy: 0.9986 - val_loss: 0.3494 - val_accuracy: 0.9328
-Epoch 339/1000
-60000/60000 - 6s - loss: 0.0139 - accuracy: 0.9985 - val_loss: 0.3510 - val_accuracy: 0.9326
-Epoch 340/1000
-60000/60000 - 6s - loss: 0.0139 - accuracy: 0.9986 - val_loss: 0.3515 - val_accuracy: 0.9329
-Epoch 341/1000
-60000/60000 - 6s - loss: 0.0137 - accuracy: 0.9986 - val_loss: 0.3518 - val_accuracy: 0.9341
-Epoch 342/1000
-60000/60000 - 6s - loss: 0.0138 - accuracy: 0.9986 - val_loss: 0.3513 - val_accuracy: 0.9334
-Epoch 343/1000
-60000/60000 - 6s - loss: 0.0137 - accuracy: 0.9986 - val_loss: 0.3512 - val_accuracy: 0.9333
-Epoch 344/1000
-60000/60000 - 6s - loss: 0.0136 - accuracy: 0.9985 - val_loss: 0.3522 - val_accuracy: 0.9334
-Epoch 345/1000
-60000/60000 - 6s - loss: 0.0137 - accuracy: 0.9987 - val_loss: 0.3522 - val_accuracy: 0.9341
-Epoch 346/1000
-60000/60000 - 6s - loss: 0.0136 - accuracy: 0.9986 - val_loss: 0.3535 - val_accuracy: 0.9331
-Epoch 347/1000
-60000/60000 - 6s - loss: 0.0136 - accuracy: 0.9986 - val_loss: 0.3541 - val_accuracy: 0.9328
-Epoch 348/1000
-60000/60000 - 6s - loss: 0.0135 - accuracy: 0.9987 - val_loss: 0.3531 - val_accuracy: 0.9326
-Epoch 349/1000
-60000/60000 - 6s - loss: 0.0135 - accuracy: 0.9987 - val_loss: 0.3538 - val_accuracy: 0.9336
-Epoch 350/1000
-60000/60000 - 6s - loss: 0.0134 - accuracy: 0.9987 - val_loss: 0.3531 - val_accuracy: 0.9338
-Epoch 351/1000
-60000/60000 - 6s - loss: 0.0134 - accuracy: 0.9987 - val_loss: 0.3530 - val_accuracy: 0.9337
-Epoch 352/1000
-60000/60000 - 6s - loss: 0.0134 - accuracy: 0.9986 - val_loss: 0.3536 - val_accuracy: 0.9334
-Epoch 353/1000
-60000/60000 - 6s - loss: 0.0133 - accuracy: 0.9987 - val_loss: 0.3563 - val_accuracy: 0.9334
-Epoch 354/1000
-60000/60000 - 6s - loss: 0.0133 - accuracy: 0.9986 - val_loss: 0.3573 - val_accuracy: 0.9330
-Epoch 355/1000
-60000/60000 - 6s - loss: 0.0132 - accuracy: 0.9987 - val_loss: 0.3537 - val_accuracy: 0.9335
-Epoch 356/1000
-60000/60000 - 6s - loss: 0.0132 - accuracy: 0.9987 - val_loss: 0.3554 - val_accuracy: 0.9327
-Epoch 357/1000
-60000/60000 - 6s - loss: 0.0132 - accuracy: 0.9987 - val_loss: 0.3557 - val_accuracy: 0.9334
-Epoch 358/1000
-60000/60000 - 6s - loss: 0.0131 - accuracy: 0.9987 - val_loss: 0.3562 - val_accuracy: 0.9326
-Epoch 359/1000
-60000/60000 - 6s - loss: 0.0131 - accuracy: 0.9987 - val_loss: 0.3581 - val_accuracy: 0.9339
-Epoch 360/1000
-60000/60000 - 6s - loss: 0.0130 - accuracy: 0.9988 - val_loss: 0.3567 - val_accuracy: 0.9337
-Epoch 361/1000
-60000/60000 - 6s - loss: 0.0130 - accuracy: 0.9987 - val_loss: 0.3567 - val_accuracy: 0.9328
-Epoch 362/1000
-60000/60000 - 6s - loss: 0.0130 - accuracy: 0.9987 - val_loss: 0.3588 - val_accuracy: 0.9334
-Epoch 363/1000
-60000/60000 - 6s - loss: 0.0129 - accuracy: 0.9987 - val_loss: 0.3576 - val_accuracy: 0.9333
-Epoch 364/1000
-60000/60000 - 6s - loss: 0.0129 - accuracy: 0.9987 - val_loss: 0.3575 - val_accuracy: 0.9334
-Epoch 365/1000
-60000/60000 - 6s - loss: 0.0129 - accuracy: 0.9987 - val_loss: 0.3570 - val_accuracy: 0.9328
-Epoch 366/1000
-60000/60000 - 6s - loss: 0.0129 - accuracy: 0.9988 - val_loss: 0.3580 - val_accuracy: 0.9339
-Epoch 367/1000
-60000/60000 - 6s - loss: 0.0128 - accuracy: 0.9987 - val_loss: 0.3589 - val_accuracy: 0.9328
-Epoch 368/1000
-60000/60000 - 6s - loss: 0.0128 - accuracy: 0.9987 - val_loss: 0.3587 - val_accuracy: 0.9325
-Epoch 369/1000
-60000/60000 - 6s - loss: 0.0127 - accuracy: 0.9988 - val_loss: 0.3586 - val_accuracy: 0.9329
-Epoch 370/1000
-60000/60000 - 6s - loss: 0.0127 - accuracy: 0.9987 - val_loss: 0.3587 - val_accuracy: 0.9331
-Epoch 371/1000
-60000/60000 - 6s - loss: 0.0127 - accuracy: 0.9988 - val_loss: 0.3589 - val_accuracy: 0.9332
-Epoch 372/1000
-60000/60000 - 6s - loss: 0.0126 - accuracy: 0.9988 - val_loss: 0.3605 - val_accuracy: 0.9321
-Epoch 373/1000
-60000/60000 - 6s - loss: 0.0126 - accuracy: 0.9988 - val_loss: 0.3602 - val_accuracy: 0.9330
-Epoch 374/1000
-60000/60000 - 6s - loss: 0.0126 - accuracy: 0.9988 - val_loss: 0.3598 - val_accuracy: 0.9331
-Epoch 375/1000
-60000/60000 - 6s - loss: 0.0126 - accuracy: 0.9988 - val_loss: 0.3604 - val_accuracy: 0.9331
-Epoch 376/1000
-60000/60000 - 6s - loss: 0.0125 - accuracy: 0.9988 - val_loss: 0.3602 - val_accuracy: 0.9333
-Epoch 377/1000
-60000/60000 - 6s - loss: 0.0125 - accuracy: 0.9987 - val_loss: 0.3622 - val_accuracy: 0.9328
-Epoch 378/1000
-60000/60000 - 6s - loss: 0.0124 - accuracy: 0.9988 - val_loss: 0.3599 - val_accuracy: 0.9336
-Epoch 379/1000
-60000/60000 - 6s - loss: 0.0124 - accuracy: 0.9987 - val_loss: 0.3621 - val_accuracy: 0.9322
-Epoch 380/1000
-60000/60000 - 6s - loss: 0.0124 - accuracy: 0.9988 - val_loss: 0.3616 - val_accuracy: 0.9337
-Epoch 381/1000
-60000/60000 - 6s - loss: 0.0124 - accuracy: 0.9988 - val_loss: 0.3621 - val_accuracy: 0.9334
-Epoch 382/1000
-60000/60000 - 6s - loss: 0.0123 - accuracy: 0.9988 - val_loss: 0.3629 - val_accuracy: 0.9342
-Epoch 383/1000
-60000/60000 - 6s - loss: 0.0123 - accuracy: 0.9988 - val_loss: 0.3632 - val_accuracy: 0.9333
-Epoch 384/1000
-60000/60000 - 6s - loss: 0.0123 - accuracy: 0.9988 - val_loss: 0.3630 - val_accuracy: 0.9334
-Epoch 385/1000
-60000/60000 - 6s - loss: 0.0122 - accuracy: 0.9988 - val_loss: 0.3624 - val_accuracy: 0.9332
-Epoch 386/1000
-60000/60000 - 6s - loss: 0.0122 - accuracy: 0.9989 - val_loss: 0.3631 - val_accuracy: 0.9323
-Epoch 387/1000
-60000/60000 - 6s - loss: 0.0121 - accuracy: 0.9988 - val_loss: 0.3641 - val_accuracy: 0.9340
-Epoch 388/1000
-60000/60000 - 6s - loss: 0.0121 - accuracy: 0.9989 - val_loss: 0.3636 - val_accuracy: 0.9325
-Epoch 389/1000
-60000/60000 - 6s - loss: 0.0121 - accuracy: 0.9988 - val_loss: 0.3646 - val_accuracy: 0.9330
-Epoch 390/1000
-60000/60000 - 6s - loss: 0.0120 - accuracy: 0.9988 - val_loss: 0.3654 - val_accuracy: 0.9327
-Epoch 391/1000
-60000/60000 - 6s - loss: 0.0120 - accuracy: 0.9988 - val_loss: 0.3643 - val_accuracy: 0.9330
-Epoch 392/1000
-60000/60000 - 6s - loss: 0.0120 - accuracy: 0.9988 - val_loss: 0.3647 - val_accuracy: 0.9326
-Epoch 393/1000
-60000/60000 - 6s - loss: 0.0120 - accuracy: 0.9988 - val_loss: 0.3656 - val_accuracy: 0.9330
-Epoch 394/1000
-60000/60000 - 6s - loss: 0.0119 - accuracy: 0.9988 - val_loss: 0.3640 - val_accuracy: 0.9324
-Epoch 395/1000
-60000/60000 - 6s - loss: 0.0119 - accuracy: 0.9989 - val_loss: 0.3649 - val_accuracy: 0.9334
-Epoch 396/1000
-60000/60000 - 6s - loss: 0.0119 - accuracy: 0.9989 - val_loss: 0.3658 - val_accuracy: 0.9337
-Epoch 397/1000
-60000/60000 - 6s - loss: 0.0119 - accuracy: 0.9989 - val_loss: 0.3661 - val_accuracy: 0.9338
-Epoch 398/1000
-60000/60000 - 6s - loss: 0.0118 - accuracy: 0.9989 - val_loss: 0.3658 - val_accuracy: 0.9330
-Epoch 399/1000
-60000/60000 - 6s - loss: 0.0118 - accuracy: 0.9989 - val_loss: 0.3672 - val_accuracy: 0.9336
-Epoch 400/1000
-60000/60000 - 6s - loss: 0.0118 - accuracy: 0.9989 - val_loss: 0.3670 - val_accuracy: 0.9329
-Epoch 401/1000
-60000/60000 - 6s - loss: 0.0117 - accuracy: 0.9989 - val_loss: 0.3665 - val_accuracy: 0.9333
-Epoch 402/1000
-60000/60000 - 6s - loss: 0.0117 - accuracy: 0.9989 - val_loss: 0.3671 - val_accuracy: 0.9331
-Epoch 403/1000
-60000/60000 - 6s - loss: 0.0117 - accuracy: 0.9989 - val_loss: 0.3667 - val_accuracy: 0.9332
-Epoch 404/1000
-60000/60000 - 6s - loss: 0.0116 - accuracy: 0.9989 - val_loss: 0.3667 - val_accuracy: 0.9333
-Epoch 405/1000
-60000/60000 - 6s - loss: 0.0116 - accuracy: 0.9989 - val_loss: 0.3681 - val_accuracy: 0.9337
-Epoch 406/1000
-60000/60000 - 6s - loss: 0.0116 - accuracy: 0.9989 - val_loss: 0.3677 - val_accuracy: 0.9327
-Epoch 407/1000
-60000/60000 - 6s - loss: 0.0116 - accuracy: 0.9989 - val_loss: 0.3686 - val_accuracy: 0.9329
-Epoch 408/1000
-60000/60000 - 6s - loss: 0.0115 - accuracy: 0.9989 - val_loss: 0.3681 - val_accuracy: 0.9328
-Epoch 409/1000
-60000/60000 - 6s - loss: 0.0115 - accuracy: 0.9989 - val_loss: 0.3682 - val_accuracy: 0.9340
-Epoch 410/1000
-60000/60000 - 6s - loss: 0.0115 - accuracy: 0.9989 - val_loss: 0.3681 - val_accuracy: 0.9334
-Epoch 411/1000
-60000/60000 - 6s - loss: 0.0114 - accuracy: 0.9989 - val_loss: 0.3698 - val_accuracy: 0.9334
-Epoch 412/1000
-60000/60000 - 6s - loss: 0.0114 - accuracy: 0.9989 - val_loss: 0.3692 - val_accuracy: 0.9328
-Epoch 413/1000
-60000/60000 - 6s - loss: 0.0114 - accuracy: 0.9989 - val_loss: 0.3698 - val_accuracy: 0.9334
-Epoch 414/1000
-60000/60000 - 6s - loss: 0.0114 - accuracy: 0.9989 - val_loss: 0.3702 - val_accuracy: 0.9327
-Epoch 415/1000
-60000/60000 - 6s - loss: 0.0113 - accuracy: 0.9989 - val_loss: 0.3701 - val_accuracy: 0.9327
-Epoch 416/1000
-60000/60000 - 6s - loss: 0.0113 - accuracy: 0.9989 - val_loss: 0.3705 - val_accuracy: 0.9326
-Epoch 417/1000
-60000/60000 - 6s - loss: 0.0113 - accuracy: 0.9990 - val_loss: 0.3714 - val_accuracy: 0.9329
-Epoch 418/1000
-60000/60000 - 6s - loss: 0.0113 - accuracy: 0.9990 - val_loss: 0.3710 - val_accuracy: 0.9330
-Epoch 419/1000
-60000/60000 - 6s - loss: 0.0113 - accuracy: 0.9989 - val_loss: 0.3706 - val_accuracy: 0.9325
-Epoch 420/1000
-60000/60000 - 6s - loss: 0.0112 - accuracy: 0.9989 - val_loss: 0.3718 - val_accuracy: 0.9336
-Epoch 421/1000
-60000/60000 - 6s - loss: 0.0112 - accuracy: 0.9990 - val_loss: 0.3730 - val_accuracy: 0.9327
-Epoch 422/1000
-60000/60000 - 6s - loss: 0.0112 - accuracy: 0.9989 - val_loss: 0.3720 - val_accuracy: 0.9330
-Epoch 423/1000
-60000/60000 - 6s - loss: 0.0111 - accuracy: 0.9989 - val_loss: 0.3724 - val_accuracy: 0.9335
-Epoch 424/1000
-60000/60000 - 6s - loss: 0.0112 - accuracy: 0.9989 - val_loss: 0.3721 - val_accuracy: 0.9323
-Epoch 425/1000
-60000/60000 - 6s - loss: 0.0111 - accuracy: 0.9990 - val_loss: 0.3720 - val_accuracy: 0.9323
-Epoch 426/1000
-60000/60000 - 6s - loss: 0.0111 - accuracy: 0.9990 - val_loss: 0.3725 - val_accuracy: 0.9324
-Epoch 427/1000
-60000/60000 - 6s - loss: 0.0111 - accuracy: 0.9989 - val_loss: 0.3730 - val_accuracy: 0.9331
-Epoch 428/1000
-60000/60000 - 6s - loss: 0.0110 - accuracy: 0.9989 - val_loss: 0.3734 - val_accuracy: 0.9321
-Epoch 429/1000
-60000/60000 - 6s - loss: 0.0110 - accuracy: 0.9990 - val_loss: 0.3731 - val_accuracy: 0.9327
-Epoch 430/1000
-60000/60000 - 6s - loss: 0.0110 - accuracy: 0.9990 - val_loss: 0.3734 - val_accuracy: 0.9328
-Epoch 431/1000
-60000/60000 - 6s - loss: 0.0109 - accuracy: 0.9990 - val_loss: 0.3742 - val_accuracy: 0.9324
-Epoch 432/1000
-60000/60000 - 6s - loss: 0.0109 - accuracy: 0.9990 - val_loss: 0.3739 - val_accuracy: 0.9333
-Epoch 433/1000
-60000/60000 - 6s - loss: 0.0109 - accuracy: 0.9990 - val_loss: 0.3736 - val_accuracy: 0.9332
-Epoch 434/1000
-60000/60000 - 6s - loss: 0.0109 - accuracy: 0.9991 - val_loss: 0.3755 - val_accuracy: 0.9315
-Epoch 435/1000
-60000/60000 - 6s - loss: 0.0109 - accuracy: 0.9990 - val_loss: 0.3752 - val_accuracy: 0.9324
-Epoch 436/1000
-60000/60000 - 6s - loss: 0.0108 - accuracy: 0.9990 - val_loss: 0.3747 - val_accuracy: 0.9328
-Epoch 437/1000
-60000/60000 - 6s - loss: 0.0108 - accuracy: 0.9991 - val_loss: 0.3762 - val_accuracy: 0.9330
-Epoch 438/1000
-60000/60000 - 6s - loss: 0.0108 - accuracy: 0.9990 - val_loss: 0.3766 - val_accuracy: 0.9327
-Epoch 439/1000
-60000/60000 - 6s - loss: 0.0107 - accuracy: 0.9990 - val_loss: 0.3754 - val_accuracy: 0.9326
-Epoch 440/1000
-60000/60000 - 6s - loss: 0.0107 - accuracy: 0.9990 - val_loss: 0.3756 - val_accuracy: 0.9326
-Epoch 441/1000
-60000/60000 - 6s - loss: 0.0107 - accuracy: 0.9990 - val_loss: 0.3766 - val_accuracy: 0.9324
-Epoch 442/1000
-60000/60000 - 6s - loss: 0.0107 - accuracy: 0.9990 - val_loss: 0.3760 - val_accuracy: 0.9323
-Epoch 443/1000
-60000/60000 - 6s - loss: 0.0107 - accuracy: 0.9990 - val_loss: 0.3774 - val_accuracy: 0.9331
-Epoch 444/1000
-60000/60000 - 6s - loss: 0.0107 - accuracy: 0.9991 - val_loss: 0.3775 - val_accuracy: 0.9325
-Epoch 445/1000
-60000/60000 - 6s - loss: 0.0106 - accuracy: 0.9990 - val_loss: 0.3766 - val_accuracy: 0.9324
-Epoch 446/1000
-60000/60000 - 6s - loss: 0.0106 - accuracy: 0.9990 - val_loss: 0.3776 - val_accuracy: 0.9327
-Epoch 447/1000
-60000/60000 - 6s - loss: 0.0106 - accuracy: 0.9990 - val_loss: 0.3773 - val_accuracy: 0.9324
-Epoch 448/1000
-60000/60000 - 6s - loss: 0.0106 - accuracy: 0.9991 - val_loss: 0.3778 - val_accuracy: 0.9322
-Epoch 449/1000
-60000/60000 - 6s - loss: 0.0105 - accuracy: 0.9991 - val_loss: 0.3788 - val_accuracy: 0.9325
-Epoch 450/1000
-60000/60000 - 6s - loss: 0.0105 - accuracy: 0.9991 - val_loss: 0.3794 - val_accuracy: 0.9325
-Epoch 451/1000
-60000/60000 - 6s - loss: 0.0105 - accuracy: 0.9990 - val_loss: 0.3779 - val_accuracy: 0.9319
-Epoch 452/1000
-60000/60000 - 6s - loss: 0.0104 - accuracy: 0.9990 - val_loss: 0.3793 - val_accuracy: 0.9315
-Epoch 453/1000
-60000/60000 - 6s - loss: 0.0104 - accuracy: 0.9991 - val_loss: 0.3787 - val_accuracy: 0.9326
-Epoch 454/1000
-60000/60000 - 6s - loss: 0.0104 - accuracy: 0.9990 - val_loss: 0.3787 - val_accuracy: 0.9322
-Epoch 455/1000
-60000/60000 - 6s - loss: 0.0104 - accuracy: 0.9990 - val_loss: 0.3794 - val_accuracy: 0.9318
-Epoch 456/1000
-60000/60000 - 6s - loss: 0.0103 - accuracy: 0.9991 - val_loss: 0.3808 - val_accuracy: 0.9328
-Epoch 457/1000
-60000/60000 - 6s - loss: 0.0103 - accuracy: 0.9991 - val_loss: 0.3800 - val_accuracy: 0.9326
-Epoch 458/1000
-60000/60000 - 6s - loss: 0.0103 - accuracy: 0.9990 - val_loss: 0.3795 - val_accuracy: 0.9325
-Epoch 459/1000
-60000/60000 - 6s - loss: 0.0103 - accuracy: 0.9991 - val_loss: 0.3812 - val_accuracy: 0.9321
-Epoch 460/1000
-60000/60000 - 6s - loss: 0.0102 - accuracy: 0.9990 - val_loss: 0.3811 - val_accuracy: 0.9323
-Epoch 461/1000
-60000/60000 - 6s - loss: 0.0102 - accuracy: 0.9991 - val_loss: 0.3806 - val_accuracy: 0.9322
-Epoch 462/1000
-60000/60000 - 6s - loss: 0.0102 - accuracy: 0.9991 - val_loss: 0.3812 - val_accuracy: 0.9331
-Epoch 463/1000
-60000/60000 - 6s - loss: 0.0102 - accuracy: 0.9991 - val_loss: 0.3813 - val_accuracy: 0.9328
-Epoch 464/1000
-60000/60000 - 6s - loss: 0.0101 - accuracy: 0.9991 - val_loss: 0.3812 - val_accuracy: 0.9322
-Epoch 465/1000
-60000/60000 - 6s - loss: 0.0101 - accuracy: 0.9991 - val_loss: 0.3817 - val_accuracy: 0.9319
-Epoch 466/1000
-60000/60000 - 6s - loss: 0.0101 - accuracy: 0.9991 - val_loss: 0.3817 - val_accuracy: 0.9324
-Epoch 467/1000
-60000/60000 - 6s - loss: 0.0101 - accuracy: 0.9991 - val_loss: 0.3833 - val_accuracy: 0.9326
-Epoch 468/1000
-60000/60000 - 6s - loss: 0.0101 - accuracy: 0.9991 - val_loss: 0.3824 - val_accuracy: 0.9324
-Epoch 469/1000
-60000/60000 - 6s - loss: 0.0100 - accuracy: 0.9991 - val_loss: 0.3825 - val_accuracy: 0.9315
-Epoch 470/1000
-60000/60000 - 6s - loss: 0.0100 - accuracy: 0.9991 - val_loss: 0.3830 - val_accuracy: 0.9321
-Epoch 471/1000
-60000/60000 - 6s - loss: 0.0100 - accuracy: 0.9990 - val_loss: 0.3822 - val_accuracy: 0.9326
-Epoch 472/1000
-60000/60000 - 6s - loss: 0.0100 - accuracy: 0.9991 - val_loss: 0.3839 - val_accuracy: 0.9320
-Epoch 473/1000
-60000/60000 - 6s - loss: 0.0100 - accuracy: 0.9991 - val_loss: 0.3827 - val_accuracy: 0.9320
-Epoch 474/1000
-60000/60000 - 6s - loss: 0.0100 - accuracy: 0.9991 - val_loss: 0.3839 - val_accuracy: 0.9328
-Epoch 475/1000
-60000/60000 - 6s - loss: 0.0099 - accuracy: 0.9991 - val_loss: 0.3835 - val_accuracy: 0.9327
-Epoch 476/1000
-60000/60000 - 6s - loss: 0.0099 - accuracy: 0.9991 - val_loss: 0.3838 - val_accuracy: 0.9326
-Epoch 477/1000
-60000/60000 - 6s - loss: 0.0099 - accuracy: 0.9991 - val_loss: 0.3841 - val_accuracy: 0.9322
-Epoch 478/1000
-60000/60000 - 6s - loss: 0.0099 - accuracy: 0.9991 - val_loss: 0.3845 - val_accuracy: 0.9328
-Epoch 479/1000
-60000/60000 - 6s - loss: 0.0099 - accuracy: 0.9991 - val_loss: 0.3845 - val_accuracy: 0.9323
-Epoch 480/1000
-60000/60000 - 6s - loss: 0.0098 - accuracy: 0.9991 - val_loss: 0.3844 - val_accuracy: 0.9327
-Epoch 481/1000
-60000/60000 - 6s - loss: 0.0098 - accuracy: 0.9991 - val_loss: 0.3853 - val_accuracy: 0.9322
-Epoch 482/1000
-60000/60000 - 6s - loss: 0.0098 - accuracy: 0.9991 - val_loss: 0.3853 - val_accuracy: 0.9322
-Epoch 483/1000
-60000/60000 - 6s - loss: 0.0098 - accuracy: 0.9991 - val_loss: 0.3857 - val_accuracy: 0.9321
-Epoch 484/1000
-60000/60000 - 6s - loss: 0.0098 - accuracy: 0.9991 - val_loss: 0.3866 - val_accuracy: 0.9321
-Epoch 485/1000
-60000/60000 - 6s - loss: 0.0097 - accuracy: 0.9991 - val_loss: 0.3857 - val_accuracy: 0.9319
-Epoch 486/1000
-60000/60000 - 6s - loss: 0.0097 - accuracy: 0.9991 - val_loss: 0.3859 - val_accuracy: 0.9321
-Epoch 487/1000
-60000/60000 - 6s - loss: 0.0097 - accuracy: 0.9991 - val_loss: 0.3858 - val_accuracy: 0.9317
-Epoch 488/1000
-60000/60000 - 6s - loss: 0.0097 - accuracy: 0.9991 - val_loss: 0.3867 - val_accuracy: 0.9325
-Epoch 489/1000
-60000/60000 - 6s - loss: 0.0097 - accuracy: 0.9991 - val_loss: 0.3867 - val_accuracy: 0.9323
-Epoch 490/1000
-60000/60000 - 6s - loss: 0.0097 - accuracy: 0.9991 - val_loss: 0.3872 - val_accuracy: 0.9323
-Epoch 491/1000
-60000/60000 - 6s - loss: 0.0096 - accuracy: 0.9991 - val_loss: 0.3876 - val_accuracy: 0.9323
-Epoch 492/1000
-60000/60000 - 6s - loss: 0.0096 - accuracy: 0.9991 - val_loss: 0.3878 - val_accuracy: 0.9326
-Epoch 493/1000
-60000/60000 - 6s - loss: 0.0096 - accuracy: 0.9991 - val_loss: 0.3871 - val_accuracy: 0.9319
-Epoch 494/1000
-60000/60000 - 6s - loss: 0.0096 - accuracy: 0.9992 - val_loss: 0.3881 - val_accuracy: 0.9322
-Epoch 495/1000
-60000/60000 - 6s - loss: 0.0096 - accuracy: 0.9991 - val_loss: 0.3887 - val_accuracy: 0.9328
-Epoch 496/1000
-60000/60000 - 6s - loss: 0.0095 - accuracy: 0.9992 - val_loss: 0.3881 - val_accuracy: 0.9322
-Epoch 497/1000
-60000/60000 - 6s - loss: 0.0095 - accuracy: 0.9991 - val_loss: 0.3884 - val_accuracy: 0.9317
-Epoch 498/1000
-60000/60000 - 6s - loss: 0.0095 - accuracy: 0.9991 - val_loss: 0.3888 - val_accuracy: 0.9322
-Epoch 499/1000
-60000/60000 - 6s - loss: 0.0095 - accuracy: 0.9991 - val_loss: 0.3890 - val_accuracy: 0.9326
-Epoch 500/1000
-60000/60000 - 6s - loss: 0.0094 - accuracy: 0.9991 - val_loss: 0.3885 - val_accuracy: 0.9324
-Epoch 501/1000
-60000/60000 - 6s - loss: 0.0095 - accuracy: 0.9991 - val_loss: 0.3890 - val_accuracy: 0.9321
-Epoch 502/1000
-60000/60000 - 6s - loss: 0.0094 - accuracy: 0.9991 - val_loss: 0.3891 - val_accuracy: 0.9317
-Epoch 503/1000
-60000/60000 - 6s - loss: 0.0094 - accuracy: 0.9992 - val_loss: 0.3896 - val_accuracy: 0.9322
-Epoch 504/1000
-60000/60000 - 6s - loss: 0.0094 - accuracy: 0.9992 - val_loss: 0.3905 - val_accuracy: 0.9319
-Epoch 505/1000
-60000/60000 - 6s - loss: 0.0094 - accuracy: 0.9992 - val_loss: 0.3898 - val_accuracy: 0.9317
-Epoch 506/1000
-60000/60000 - 6s - loss: 0.0093 - accuracy: 0.9991 - val_loss: 0.3911 - val_accuracy: 0.9317
-Epoch 507/1000
-60000/60000 - 6s - loss: 0.0093 - accuracy: 0.9991 - val_loss: 0.3900 - val_accuracy: 0.9326
-Epoch 508/1000
-60000/60000 - 6s - loss: 0.0093 - accuracy: 0.9992 - val_loss: 0.3906 - val_accuracy: 0.9317
-Epoch 509/1000
-60000/60000 - 6s - loss: 0.0093 - accuracy: 0.9991 - val_loss: 0.3901 - val_accuracy: 0.9321
-Epoch 510/1000
-60000/60000 - 6s - loss: 0.0093 - accuracy: 0.9992 - val_loss: 0.3919 - val_accuracy: 0.9320
-Epoch 511/1000
-60000/60000 - 6s - loss: 0.0093 - accuracy: 0.9992 - val_loss: 0.3904 - val_accuracy: 0.9319
-Epoch 512/1000
-60000/60000 - 6s - loss: 0.0093 - accuracy: 0.9991 - val_loss: 0.3908 - val_accuracy: 0.9320
-Epoch 513/1000
-60000/60000 - 6s - loss: 0.0092 - accuracy: 0.9992 - val_loss: 0.3916 - val_accuracy: 0.9322
-Epoch 514/1000
-60000/60000 - 6s - loss: 0.0092 - accuracy: 0.9991 - val_loss: 0.3918 - val_accuracy: 0.9319
-Epoch 515/1000
-60000/60000 - 6s - loss: 0.0092 - accuracy: 0.9992 - val_loss: 0.3920 - val_accuracy: 0.9321
-Epoch 516/1000
-60000/60000 - 6s - loss: 0.0092 - accuracy: 0.9992 - val_loss: 0.3919 - val_accuracy: 0.9321
-Epoch 517/1000
-60000/60000 - 6s - loss: 0.0092 - accuracy: 0.9992 - val_loss: 0.3921 - val_accuracy: 0.9316
-Epoch 518/1000
-60000/60000 - 6s - loss: 0.0092 - accuracy: 0.9992 - val_loss: 0.3930 - val_accuracy: 0.9326
-Epoch 519/1000
-60000/60000 - 6s - loss: 0.0091 - accuracy: 0.9992 - val_loss: 0.3940 - val_accuracy: 0.9316
-Epoch 520/1000
-60000/60000 - 6s - loss: 0.0091 - accuracy: 0.9991 - val_loss: 0.3930 - val_accuracy: 0.9311
-Epoch 521/1000
-60000/60000 - 6s - loss: 0.0091 - accuracy: 0.9992 - val_loss: 0.3936 - val_accuracy: 0.9323
-Epoch 522/1000
-60000/60000 - 6s - loss: 0.0091 - accuracy: 0.9992 - val_loss: 0.3937 - val_accuracy: 0.9324
-Epoch 523/1000
-60000/60000 - 6s - loss: 0.0091 - accuracy: 0.9992 - val_loss: 0.3943 - val_accuracy: 0.9316
-Epoch 524/1000
-60000/60000 - 6s - loss: 0.0091 - accuracy: 0.9992 - val_loss: 0.3942 - val_accuracy: 0.9317
-Epoch 525/1000
-60000/60000 - 6s - loss: 0.0090 - accuracy: 0.9992 - val_loss: 0.3937 - val_accuracy: 0.9316
-Epoch 526/1000
-60000/60000 - 6s - loss: 0.0090 - accuracy: 0.9992 - val_loss: 0.3936 - val_accuracy: 0.9318
-Epoch 527/1000
-60000/60000 - 6s - loss: 0.0090 - accuracy: 0.9992 - val_loss: 0.3947 - val_accuracy: 0.9319
-Epoch 528/1000
-60000/60000 - 6s - loss: 0.0090 - accuracy: 0.9992 - val_loss: 0.3944 - val_accuracy: 0.9309
-Epoch 529/1000
-60000/60000 - 6s - loss: 0.0090 - accuracy: 0.9992 - val_loss: 0.3956 - val_accuracy: 0.9315
-Epoch 530/1000
-60000/60000 - 6s - loss: 0.0089 - accuracy: 0.9992 - val_loss: 0.3950 - val_accuracy: 0.9314
-Epoch 531/1000
-60000/60000 - 6s - loss: 0.0089 - accuracy: 0.9992 - val_loss: 0.3966 - val_accuracy: 0.9319
-Epoch 532/1000
-60000/60000 - 6s - loss: 0.0089 - accuracy: 0.9992 - val_loss: 0.3951 - val_accuracy: 0.9318
-Epoch 533/1000
-60000/60000 - 6s - loss: 0.0089 - accuracy: 0.9992 - val_loss: 0.3952 - val_accuracy: 0.9319
-Epoch 534/1000
-60000/60000 - 6s - loss: 0.0089 - accuracy: 0.9992 - val_loss: 0.3952 - val_accuracy: 0.9320
-Epoch 535/1000
-60000/60000 - 6s - loss: 0.0089 - accuracy: 0.9992 - val_loss: 0.3952 - val_accuracy: 0.9323
-Epoch 536/1000
-60000/60000 - 6s - loss: 0.0088 - accuracy: 0.9992 - val_loss: 0.3964 - val_accuracy: 0.9313
-Epoch 537/1000
-60000/60000 - 6s - loss: 0.0088 - accuracy: 0.9992 - val_loss: 0.3971 - val_accuracy: 0.9318
-Epoch 538/1000
-60000/60000 - 6s - loss: 0.0088 - accuracy: 0.9992 - val_loss: 0.3972 - val_accuracy: 0.9322
-Epoch 539/1000
-60000/60000 - 6s - loss: 0.0088 - accuracy: 0.9992 - val_loss: 0.3969 - val_accuracy: 0.9321
-Epoch 540/1000
-60000/60000 - 6s - loss: 0.0088 - accuracy: 0.9992 - val_loss: 0.3979 - val_accuracy: 0.9317
-Epoch 541/1000
-60000/60000 - 6s - loss: 0.0088 - accuracy: 0.9992 - val_loss: 0.3975 - val_accuracy: 0.9317
-Epoch 542/1000
-60000/60000 - 6s - loss: 0.0088 - accuracy: 0.9992 - val_loss: 0.3976 - val_accuracy: 0.9313
-Epoch 543/1000
-60000/60000 - 6s - loss: 0.0087 - accuracy: 0.9992 - val_loss: 0.3990 - val_accuracy: 0.9317
-Epoch 544/1000
-60000/60000 - 6s - loss: 0.0087 - accuracy: 0.9992 - val_loss: 0.3980 - val_accuracy: 0.9320
-Epoch 545/1000
-60000/60000 - 6s - loss: 0.0087 - accuracy: 0.9992 - val_loss: 0.3973 - val_accuracy: 0.9320
-Epoch 546/1000
-60000/60000 - 6s - loss: 0.0087 - accuracy: 0.9992 - val_loss: 0.3977 - val_accuracy: 0.9322
-Epoch 547/1000
-60000/60000 - 6s - loss: 0.0087 - accuracy: 0.9993 - val_loss: 0.3986 - val_accuracy: 0.9323
-Epoch 548/1000
-60000/60000 - 6s - loss: 0.0087 - accuracy: 0.9992 - val_loss: 0.3991 - val_accuracy: 0.9319
-Epoch 549/1000
-60000/60000 - 6s - loss: 0.0087 - accuracy: 0.9992 - val_loss: 0.3980 - val_accuracy: 0.9311
-Epoch 550/1000
-60000/60000 - 6s - loss: 0.0086 - accuracy: 0.9993 - val_loss: 0.3999 - val_accuracy: 0.9316
-Epoch 551/1000
-60000/60000 - 6s - loss: 0.0086 - accuracy: 0.9992 - val_loss: 0.3991 - val_accuracy: 0.9318
-Epoch 552/1000
-60000/60000 - 6s - loss: 0.0086 - accuracy: 0.9993 - val_loss: 0.3989 - val_accuracy: 0.9321
-Epoch 553/1000
-60000/60000 - 6s - loss: 0.0086 - accuracy: 0.9992 - val_loss: 0.3995 - val_accuracy: 0.9317
-Epoch 554/1000
-60000/60000 - 6s - loss: 0.0086 - accuracy: 0.9992 - val_loss: 0.3997 - val_accuracy: 0.9315
-Epoch 555/1000
-60000/60000 - 6s - loss: 0.0086 - accuracy: 0.9992 - val_loss: 0.3995 - val_accuracy: 0.9318
-Epoch 556/1000
-60000/60000 - 6s - loss: 0.0086 - accuracy: 0.9993 - val_loss: 0.3998 - val_accuracy: 0.9314
-Epoch 557/1000
-60000/60000 - 6s - loss: 0.0085 - accuracy: 0.9992 - val_loss: 0.3997 - val_accuracy: 0.9318
-Epoch 558/1000
-60000/60000 - 6s - loss: 0.0085 - accuracy: 0.9992 - val_loss: 0.4005 - val_accuracy: 0.9318
-Epoch 559/1000
-60000/60000 - 6s - loss: 0.0085 - accuracy: 0.9992 - val_loss: 0.4007 - val_accuracy: 0.9315
-Epoch 560/1000
-60000/60000 - 6s - loss: 0.0085 - accuracy: 0.9992 - val_loss: 0.4003 - val_accuracy: 0.9309
-Epoch 561/1000
-60000/60000 - 6s - loss: 0.0085 - accuracy: 0.9993 - val_loss: 0.4014 - val_accuracy: 0.9316
-Epoch 562/1000
-60000/60000 - 6s - loss: 0.0084 - accuracy: 0.9993 - val_loss: 0.4018 - val_accuracy: 0.9315
-Epoch 563/1000
-60000/60000 - 6s - loss: 0.0085 - accuracy: 0.9993 - val_loss: 0.4022 - val_accuracy: 0.9323
-Epoch 564/1000
-60000/60000 - 6s - loss: 0.0084 - accuracy: 0.9992 - val_loss: 0.4019 - val_accuracy: 0.9313
-Epoch 565/1000
-60000/60000 - 6s - loss: 0.0084 - accuracy: 0.9992 - val_loss: 0.4020 - val_accuracy: 0.9323
-Epoch 566/1000
-60000/60000 - 6s - loss: 0.0084 - accuracy: 0.9993 - val_loss: 0.4022 - val_accuracy: 0.9311
-Epoch 567/1000
-60000/60000 - 6s - loss: 0.0084 - accuracy: 0.9992 - val_loss: 0.4017 - val_accuracy: 0.9313
-Epoch 568/1000
-60000/60000 - 6s - loss: 0.0084 - accuracy: 0.9993 - val_loss: 0.4027 - val_accuracy: 0.9315
-Epoch 569/1000
-60000/60000 - 6s - loss: 0.0084 - accuracy: 0.9993 - val_loss: 0.4021 - val_accuracy: 0.9312
-Epoch 570/1000
-60000/60000 - 6s - loss: 0.0084 - accuracy: 0.9993 - val_loss: 0.4028 - val_accuracy: 0.9309
-Epoch 571/1000
-60000/60000 - 6s - loss: 0.0083 - accuracy: 0.9993 - val_loss: 0.4034 - val_accuracy: 0.9316
-Epoch 572/1000
-60000/60000 - 6s - loss: 0.0083 - accuracy: 0.9993 - val_loss: 0.4035 - val_accuracy: 0.9316
-Epoch 573/1000
-60000/60000 - 6s - loss: 0.0083 - accuracy: 0.9992 - val_loss: 0.4022 - val_accuracy: 0.9314
-Epoch 574/1000
-60000/60000 - 6s - loss: 0.0083 - accuracy: 0.9993 - val_loss: 0.4034 - val_accuracy: 0.9317
-Epoch 575/1000
-60000/60000 - 6s - loss: 0.0083 - accuracy: 0.9993 - val_loss: 0.4044 - val_accuracy: 0.9316
-Epoch 576/1000
-60000/60000 - 6s - loss: 0.0082 - accuracy: 0.9993 - val_loss: 0.4034 - val_accuracy: 0.9317
-Epoch 577/1000
-60000/60000 - 6s - loss: 0.0082 - accuracy: 0.9993 - val_loss: 0.4042 - val_accuracy: 0.9315
-Epoch 578/1000
-60000/60000 - 6s - loss: 0.0082 - accuracy: 0.9993 - val_loss: 0.4040 - val_accuracy: 0.9317
-Epoch 579/1000
-60000/60000 - 6s - loss: 0.0082 - accuracy: 0.9993 - val_loss: 0.4042 - val_accuracy: 0.9317
-Epoch 580/1000
-60000/60000 - 6s - loss: 0.0082 - accuracy: 0.9993 - val_loss: 0.4046 - val_accuracy: 0.9312
-Epoch 581/1000
-60000/60000 - 6s - loss: 0.0082 - accuracy: 0.9992 - val_loss: 0.4055 - val_accuracy: 0.9317
-Epoch 582/1000
-60000/60000 - 6s - loss: 0.0082 - accuracy: 0.9993 - val_loss: 0.4054 - val_accuracy: 0.9317
-Epoch 583/1000
-60000/60000 - 6s - loss: 0.0082 - accuracy: 0.9993 - val_loss: 0.4050 - val_accuracy: 0.9318
-Epoch 584/1000
-60000/60000 - 6s - loss: 0.0081 - accuracy: 0.9993 - val_loss: 0.4053 - val_accuracy: 0.9315
-Epoch 585/1000
-60000/60000 - 6s - loss: 0.0081 - accuracy: 0.9993 - val_loss: 0.4067 - val_accuracy: 0.9321
-Epoch 586/1000
-60000/60000 - 6s - loss: 0.0081 - accuracy: 0.9993 - val_loss: 0.4066 - val_accuracy: 0.9317
-Epoch 587/1000
-60000/60000 - 6s - loss: 0.0081 - accuracy: 0.9993 - val_loss: 0.4065 - val_accuracy: 0.9310
-Epoch 588/1000
-60000/60000 - 6s - loss: 0.0080 - accuracy: 0.9993 - val_loss: 0.4061 - val_accuracy: 0.9313
-Epoch 589/1000
-60000/60000 - 6s - loss: 0.0081 - accuracy: 0.9993 - val_loss: 0.4065 - val_accuracy: 0.9313
-Epoch 590/1000
-60000/60000 - 6s - loss: 0.0081 - accuracy: 0.9993 - val_loss: 0.4068 - val_accuracy: 0.9311
-Epoch 591/1000
-60000/60000 - 6s - loss: 0.0080 - accuracy: 0.9993 - val_loss: 0.4066 - val_accuracy: 0.9318
-Epoch 592/1000
-60000/60000 - 6s - loss: 0.0080 - accuracy: 0.9993 - val_loss: 0.4078 - val_accuracy: 0.9311
-Epoch 593/1000
-60000/60000 - 6s - loss: 0.0080 - accuracy: 0.9993 - val_loss: 0.4068 - val_accuracy: 0.9308
-Epoch 594/1000
-60000/60000 - 6s - loss: 0.0080 - accuracy: 0.9993 - val_loss: 0.4073 - val_accuracy: 0.9313
-Epoch 595/1000
-60000/60000 - 6s - loss: 0.0080 - accuracy: 0.9993 - val_loss: 0.4069 - val_accuracy: 0.9317
-Epoch 596/1000
-60000/60000 - 6s - loss: 0.0080 - accuracy: 0.9993 - val_loss: 0.4070 - val_accuracy: 0.9314
-Epoch 597/1000
-60000/60000 - 6s - loss: 0.0080 - accuracy: 0.9993 - val_loss: 0.4067 - val_accuracy: 0.9313
-Epoch 598/1000
-60000/60000 - 6s - loss: 0.0080 - accuracy: 0.9993 - val_loss: 0.4088 - val_accuracy: 0.9312
-Epoch 599/1000
-60000/60000 - 6s - loss: 0.0079 - accuracy: 0.9993 - val_loss: 0.4086 - val_accuracy: 0.9311
-Epoch 600/1000
-60000/60000 - 6s - loss: 0.0079 - accuracy: 0.9993 - val_loss: 0.4081 - val_accuracy: 0.9314
-Epoch 601/1000
-60000/60000 - 6s - loss: 0.0079 - accuracy: 0.9993 - val_loss: 0.4072 - val_accuracy: 0.9310
-Epoch 602/1000
-60000/60000 - 6s - loss: 0.0079 - accuracy: 0.9993 - val_loss: 0.4090 - val_accuracy: 0.9313
-Epoch 603/1000
-60000/60000 - 6s - loss: 0.0079 - accuracy: 0.9993 - val_loss: 0.4086 - val_accuracy: 0.9308
-Epoch 604/1000
-60000/60000 - 6s - loss: 0.0079 - accuracy: 0.9993 - val_loss: 0.4077 - val_accuracy: 0.9319
-Epoch 605/1000
-60000/60000 - 6s - loss: 0.0079 - accuracy: 0.9993 - val_loss: 0.4094 - val_accuracy: 0.9312
-Epoch 606/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9993 - val_loss: 0.4092 - val_accuracy: 0.9315
-Epoch 607/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9993 - val_loss: 0.4099 - val_accuracy: 0.9316
-Epoch 608/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9993 - val_loss: 0.4090 - val_accuracy: 0.9316
-Epoch 609/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9993 - val_loss: 0.4105 - val_accuracy: 0.9306
-Epoch 610/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9993 - val_loss: 0.4094 - val_accuracy: 0.9317
-Epoch 611/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9993 - val_loss: 0.4100 - val_accuracy: 0.9309
-Epoch 612/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9993 - val_loss: 0.4104 - val_accuracy: 0.9310
-Epoch 613/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9993 - val_loss: 0.4102 - val_accuracy: 0.9312
-Epoch 614/1000
-60000/60000 - 6s - loss: 0.0078 - accuracy: 0.9993 - val_loss: 0.4109 - val_accuracy: 0.9316
-Epoch 615/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9993 - val_loss: 0.4108 - val_accuracy: 0.9314
-Epoch 616/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9993 - val_loss: 0.4111 - val_accuracy: 0.9316
-Epoch 617/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9993 - val_loss: 0.4111 - val_accuracy: 0.9312
-Epoch 618/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9993 - val_loss: 0.4112 - val_accuracy: 0.9321
-Epoch 619/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9993 - val_loss: 0.4120 - val_accuracy: 0.9312
-Epoch 620/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9993 - val_loss: 0.4116 - val_accuracy: 0.9307
-Epoch 621/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9993 - val_loss: 0.4119 - val_accuracy: 0.9315
-Epoch 622/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9993 - val_loss: 0.4125 - val_accuracy: 0.9318
-Epoch 623/1000
-60000/60000 - 6s - loss: 0.0077 - accuracy: 0.9993 - val_loss: 0.4124 - val_accuracy: 0.9321
-Epoch 624/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9993 - val_loss: 0.4117 - val_accuracy: 0.9310
-Epoch 625/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9993 - val_loss: 0.4124 - val_accuracy: 0.9318
-Epoch 626/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9993 - val_loss: 0.4131 - val_accuracy: 0.9310
-Epoch 627/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9993 - val_loss: 0.4144 - val_accuracy: 0.9316
-Epoch 628/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9993 - val_loss: 0.4135 - val_accuracy: 0.9312
-Epoch 629/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9993 - val_loss: 0.4128 - val_accuracy: 0.9320
-Epoch 630/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9993 - val_loss: 0.4138 - val_accuracy: 0.9316
-Epoch 631/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9993 - val_loss: 0.4134 - val_accuracy: 0.9315
-Epoch 632/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9993 - val_loss: 0.4142 - val_accuracy: 0.9316
-Epoch 633/1000
-60000/60000 - 6s - loss: 0.0076 - accuracy: 0.9993 - val_loss: 0.4136 - val_accuracy: 0.9308
-Epoch 634/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9993 - val_loss: 0.4141 - val_accuracy: 0.9315
-Epoch 635/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9993 - val_loss: 0.4138 - val_accuracy: 0.9312
-Epoch 636/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9993 - val_loss: 0.4143 - val_accuracy: 0.9318
-Epoch 637/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9993 - val_loss: 0.4157 - val_accuracy: 0.9309
-Epoch 638/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9993 - val_loss: 0.4143 - val_accuracy: 0.9317
-Epoch 639/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9993 - val_loss: 0.4147 - val_accuracy: 0.9315
-Epoch 640/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9993 - val_loss: 0.4152 - val_accuracy: 0.9316
-Epoch 641/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9993 - val_loss: 0.4151 - val_accuracy: 0.9311
-Epoch 642/1000
-60000/60000 - 6s - loss: 0.0075 - accuracy: 0.9993 - val_loss: 0.4162 - val_accuracy: 0.9313
-Epoch 643/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9993 - val_loss: 0.4159 - val_accuracy: 0.9314
-Epoch 644/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9993 - val_loss: 0.4159 - val_accuracy: 0.9316
-Epoch 645/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9993 - val_loss: 0.4158 - val_accuracy: 0.9312
-Epoch 646/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9993 - val_loss: 0.4155 - val_accuracy: 0.9313
-Epoch 647/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9993 - val_loss: 0.4168 - val_accuracy: 0.9320
-Epoch 648/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9993 - val_loss: 0.4163 - val_accuracy: 0.9318
-Epoch 649/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9993 - val_loss: 0.4163 - val_accuracy: 0.9315
-Epoch 650/1000
-60000/60000 - 6s - loss: 0.0074 - accuracy: 0.9994 - val_loss: 0.4171 - val_accuracy: 0.9313
-Epoch 651/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9993 - val_loss: 0.4168 - val_accuracy: 0.9321
-Epoch 652/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9994 - val_loss: 0.4172 - val_accuracy: 0.9319
-Epoch 653/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9993 - val_loss: 0.4177 - val_accuracy: 0.9318
-Epoch 654/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9994 - val_loss: 0.4174 - val_accuracy: 0.9313
-Epoch 655/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9993 - val_loss: 0.4177 - val_accuracy: 0.9309
-Epoch 656/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9993 - val_loss: 0.4176 - val_accuracy: 0.9309
-Epoch 657/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9994 - val_loss: 0.4174 - val_accuracy: 0.9315
-Epoch 658/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9993 - val_loss: 0.4184 - val_accuracy: 0.9316
-Epoch 659/1000
-60000/60000 - 6s - loss: 0.0073 - accuracy: 0.9994 - val_loss: 0.4182 - val_accuracy: 0.9310
-Epoch 660/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9993 - val_loss: 0.4179 - val_accuracy: 0.9313
-Epoch 661/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9993 - val_loss: 0.4174 - val_accuracy: 0.9316
-Epoch 662/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9994 - val_loss: 0.4191 - val_accuracy: 0.9314
-Epoch 663/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9993 - val_loss: 0.4177 - val_accuracy: 0.9313
-Epoch 664/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9994 - val_loss: 0.4189 - val_accuracy: 0.9315
-Epoch 665/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9994 - val_loss: 0.4184 - val_accuracy: 0.9317
-Epoch 666/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9994 - val_loss: 0.4197 - val_accuracy: 0.9313
-Epoch 667/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9994 - val_loss: 0.4194 - val_accuracy: 0.9320
-Epoch 668/1000
-60000/60000 - 6s - loss: 0.0072 - accuracy: 0.9994 - val_loss: 0.4191 - val_accuracy: 0.9311
-Epoch 669/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9994 - val_loss: 0.4188 - val_accuracy: 0.9310
-Epoch 670/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9994 - val_loss: 0.4197 - val_accuracy: 0.9315
-Epoch 671/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9993 - val_loss: 0.4198 - val_accuracy: 0.9314
-Epoch 672/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9994 - val_loss: 0.4201 - val_accuracy: 0.9315
-Epoch 673/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9994 - val_loss: 0.4198 - val_accuracy: 0.9309
-Epoch 674/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9994 - val_loss: 0.4210 - val_accuracy: 0.9320
-Epoch 675/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9994 - val_loss: 0.4218 - val_accuracy: 0.9311
-Epoch 676/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9994 - val_loss: 0.4204 - val_accuracy: 0.9310
-Epoch 677/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9994 - val_loss: 0.4206 - val_accuracy: 0.9306
-Epoch 678/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9994 - val_loss: 0.4205 - val_accuracy: 0.9318
-Epoch 679/1000
-60000/60000 - 6s - loss: 0.0071 - accuracy: 0.9994 - val_loss: 0.4218 - val_accuracy: 0.9312
-Epoch 680/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9994 - val_loss: 0.4215 - val_accuracy: 0.9307
-Epoch 681/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9994 - val_loss: 0.4217 - val_accuracy: 0.9314
-Epoch 682/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9994 - val_loss: 0.4222 - val_accuracy: 0.9311
-Epoch 683/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9994 - val_loss: 0.4219 - val_accuracy: 0.9308
-Epoch 684/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9994 - val_loss: 0.4228 - val_accuracy: 0.9313
-Epoch 685/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9994 - val_loss: 0.4233 - val_accuracy: 0.9310
-Epoch 686/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9994 - val_loss: 0.4221 - val_accuracy: 0.9310
-Epoch 687/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9994 - val_loss: 0.4229 - val_accuracy: 0.9316
-Epoch 688/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9994 - val_loss: 0.4224 - val_accuracy: 0.9310
-Epoch 689/1000
-60000/60000 - 6s - loss: 0.0070 - accuracy: 0.9994 - val_loss: 0.4233 - val_accuracy: 0.9312
-Epoch 690/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9994 - val_loss: 0.4232 - val_accuracy: 0.9314
-Epoch 691/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9994 - val_loss: 0.4229 - val_accuracy: 0.9312
-Epoch 692/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9994 - val_loss: 0.4240 - val_accuracy: 0.9303
-Epoch 693/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9994 - val_loss: 0.4239 - val_accuracy: 0.9302
-Epoch 694/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9994 - val_loss: 0.4240 - val_accuracy: 0.9314
-Epoch 695/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9994 - val_loss: 0.4246 - val_accuracy: 0.9314
-Epoch 696/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9994 - val_loss: 0.4248 - val_accuracy: 0.9314
-Epoch 697/1000
-60000/60000 - 6s - loss: 0.0069 - accuracy: 0.9994 - val_loss: 0.4244 - val_accuracy: 0.9310
-Epoch 698/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4239 - val_accuracy: 0.9311
-Epoch 699/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4252 - val_accuracy: 0.9308
-Epoch 700/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4254 - val_accuracy: 0.9309
-Epoch 701/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4252 - val_accuracy: 0.9310
-Epoch 702/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4252 - val_accuracy: 0.9313
-Epoch 703/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4247 - val_accuracy: 0.9311
-Epoch 704/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4248 - val_accuracy: 0.9315
-Epoch 705/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4258 - val_accuracy: 0.9309
-Epoch 706/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4253 - val_accuracy: 0.9311
-Epoch 707/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4265 - val_accuracy: 0.9301
-Epoch 708/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4261 - val_accuracy: 0.9306
-Epoch 709/1000
-60000/60000 - 6s - loss: 0.0068 - accuracy: 0.9994 - val_loss: 0.4264 - val_accuracy: 0.9300
-Epoch 710/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4269 - val_accuracy: 0.9310
-Epoch 711/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4270 - val_accuracy: 0.9301
-Epoch 712/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4270 - val_accuracy: 0.9306
-Epoch 713/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4263 - val_accuracy: 0.9308
-Epoch 714/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4287 - val_accuracy: 0.9308
-Epoch 715/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4269 - val_accuracy: 0.9305
-Epoch 716/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4274 - val_accuracy: 0.9303
-Epoch 717/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4278 - val_accuracy: 0.9308
-Epoch 718/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4279 - val_accuracy: 0.9308
-Epoch 719/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4285 - val_accuracy: 0.9305
-Epoch 720/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4283 - val_accuracy: 0.9307
-Epoch 721/1000
-60000/60000 - 6s - loss: 0.0067 - accuracy: 0.9994 - val_loss: 0.4293 - val_accuracy: 0.9300
-Epoch 722/1000
-60000/60000 - 6s - loss: 0.0066 - accuracy: 0.9994 - val_loss: 0.4279 - val_accuracy: 0.9306
-[Epochs 723-1000 elided: training loss fell from 0.0066 to 0.0049 (training accuracy ~0.9995), while validation loss rose from 0.4292 to 0.4661 (validation accuracy ~0.930).]
-Test loss was 0.1903, test accuracy was 0.9462
diff --git a/MNIST/plot.py b/MNIST/plot.py
deleted file mode 100644
index 1f93fe3..0000000
--- a/MNIST/plot.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import numpy as np
-import matplotlib.pyplot as plt
-
-
-def get_accuracy(log_dir_path):
- filename = "{}/accuracy.csv".format(log_dir_path)
- csv = np.genfromtxt(filename, delimiter=",")
- accuracy = 100*csv
- return accuracy
-
-def get_error(log_dir_path):
- accuracy = get_accuracy(log_dir_path)
- error = 100 - accuracy
- return error
-
-def get_power(log_dir_path):
- filename = "{}/power.csv".format(log_dir_path)
- csv = np.genfromtxt(filename, delimiter=",")
- power = csv[0::2] + csv[1::2]
- return power
-
-
-dataset = "MNIST"
-network_types = ["regular", "non-regularized-aware", "regularized-aware"]
-group_idxs = [0, 2]
-colors = ["#e69f00", "#0072b2", "#009e73"]
-labels = ["Standard", "Nonideality-aware (non-regularised)", "Nonideality-aware (regularised)"]
-
-axis_label_font_size = 14
-legend_font_size = 12
-ticks_font_size = 10
-markersize = 7
-
-fig, ax = plt.subplots()
-boxplots = []
-
-for network_idx, network_type in enumerate(network_types):
- for group_idx in group_idxs:
- log_dir_path = "models/{}/{}/group-{}".format(
- dataset, network_type, group_idx)
- power = get_power(log_dir_path)
- error = get_error(log_dir_path)
- print("network_type:", network_type, "group_idx:", group_idx, "median error:", np.median(error), "median power:", np.median(power))
- # plt.scatter(power, accuracy, marker="x", s=markersize, color=colors[network_idx], label=labels[network_idx])
- w = 0.1
- x_pos = np.median(power)
- boxplots.append(plt.boxplot(error, positions=[x_pos], widths=[10**(np.log10(x_pos)+w/2.)-10**(np.log10(x_pos)-w/2.)], sym=colors[network_idx]))
- bplot = boxplots[-1]
- plt.setp(bplot['fliers'], marker='x', markersize=4)
- for element in ['boxes', 'whiskers', 'fliers', 'means', 'medians', 'caps']:
- plt.setp(bplot[element], color=colors[network_idx])
-
-
-ax.legend([boxplot["boxes"][0] for boxplot in boxplots[::len(group_idxs)]],
- [label for label in labels], fontsize=legend_font_size)
-plt.xticks(fontsize=ticks_font_size)
-plt.yticks(fontsize=ticks_font_size)
-plt.xlabel("Ohmic power consumption (W)", fontsize=axis_label_font_size)
-plt.ylabel("Inference test error (%)", fontsize=axis_label_font_size)
-plt.semilogx()
-plt.semilogy()
-# x.ticklabel_format(style='plain')
-# ax.get_yaxis().get_major_formatter().set_scientific(False)
-# plt.show()
-plt.savefig("error-box-plot-two-groups.pdf", bbox_inches='tight')
-
diff --git a/README.md b/README.md
index 18ab05f..c6eb6a9 100644
--- a/README.md
+++ b/README.md
@@ -1,19 +1,40 @@
-# Adjusting Training to Enable Accurate Low-Power Memristive Neural Networks
+# Nonideality-Aware Training for Accurate and Robust Low-Power Memristive Neural Networks
## Requirements
-TensorFlow 2.0 or higher.
+Python ≥3.9 and the packages listed in [requirements.txt](/requirements.txt).
-## Test
+## Repository structure
-To train, go to `MNIST`, set `path_to_project` in `MNIST/Train.py` and then run `python Train.py`.
+`awarememristor/crossbar`: memristor nonidealities and mapping onto crossbar arrays.
-## Repo organisation
+`awarememristor/training`: network training.
-`model_architectures.py`: model topology.
+`awarememristor/simulations`: simulations presented in the manuscript.
-`memristor_utils.py`: custom layers including `memristor_dense`.
+`awarememristor/plotting`: figures presented in the manuscript.
-`crossbar`: mapping and nonidealities.
+## Reproducing results
-`MNIST/Train.py`: training setup.
+The script [reproduce_paper.py](/reproduce_paper.py) reproduces the simulations and plots presented in the manuscript.
+Please follow the instructions in the script to obtain any missing experimental data (or comment out the function calls that require these data).
+After that, execute
+```text
+python reproduce_paper.py
+```
+
+This may take a long time to finish, so you may want to split the script up, for example, to run the simulations on multiple machines.
+
+## Testing
+
+To run unit tests, execute
+```text
+pytest tests
+```
+
+## Using this package
+
+**This package should not be used in production.**
+The code is extensible but was written mostly with specific [simulations](/awarememristor/simulations) in mind.
+Any new functionality (such as different nonidealities) should be incorporated carefully.
+For example, to handle combinations of *multiple* linearity-preserving nonidealities (which is not currently supported), it may *not* be sufficient to simply apply them one after another.
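+
+As a minimal illustration, here is a sketch of simulating a single synaptic layer on a crossbar suffering from I-V nonlinearity; the device parameters and inputs below are example values only, not recommendations:
+
+```python
+import tensorflow as tf
+
+from awarememristor.crossbar import map as crossbar_map
+from awarememristor.crossbar.nonidealities import IVNonlinearity
+
+# Example device parameters (hypothetical; substitute values for your devices).
+G_off, G_on = 7.72e-7, 2.73e-6  # conductances in siemens
+nonideality = IVNonlinearity(V_ref=0.25, n_avg=2.99, n_std=0.369)
+
+# Map a toy weight matrix onto pairs of conductances.
+weights = tf.constant([[1.0, -2.0], [0.5, 0.25]])
+G, max_weight = crossbar_map.w_to_G(weights, G_off, G_on)
+
+# Apply inputs and decode the layer outputs from the output currents.
+V = crossbar_map.x_to_V(tf.constant([[0.5, 1.0]]), nonideality.k_V())
+I, _ = nonideality.compute_I(V, G)
+y = crossbar_map.I_to_y(I, nonideality.k_V(), max_weight, G_on, G_off)
+```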
diff --git a/awarememristor/__init__.py b/awarememristor/__init__.py
new file mode 100644
index 0000000..5f986ab
--- /dev/null
+++ b/awarememristor/__init__.py
@@ -0,0 +1 @@
+from awarememristor import crossbar, plotting, simulations, training
diff --git a/awarememristor/crossbar/__init__.py b/awarememristor/crossbar/__init__.py
new file mode 100644
index 0000000..7731ce1
--- /dev/null
+++ b/awarememristor/crossbar/__init__.py
@@ -0,0 +1,3 @@
+import awarememristor.crossbar.ideal
+import awarememristor.crossbar.map
+import awarememristor.crossbar.nonidealities
diff --git a/awarememristor/crossbar/ideal.py b/awarememristor/crossbar/ideal.py
new file mode 100644
index 0000000..41e620c
--- /dev/null
+++ b/awarememristor/crossbar/ideal.py
@@ -0,0 +1,33 @@
+import tensorflow as tf
+
+from awarememristor.crossbar import utils
+
+
+def compute_I_all(V: tf.Tensor, G: tf.Tensor) -> tuple[tf.Tensor, tf.Tensor]:
+ """Compute output and device currents of an ideal crossbar.
+
+ Args:
+ V: Voltages of shape `p x m`.
+ G: Conductances of shape `m x n`.
+
+ Returns:
+ I: Output currents of shape `p x n`.
+ I_ind: Currents of shape `p x m x n` produced by each of the conductances in the crossbar
+ array.
+ """
+ I_ind = tf.expand_dims(V, axis=-1) * tf.expand_dims(G, axis=0)
+ I = utils.add_I_BL(I_ind)
+ return I, I_ind
+
+
+def compute_I(V: tf.Tensor, G: tf.Tensor) -> tf.Tensor:
+ """Compute output currents of an ideal crossbar.
+
+ Args:
+ V: Voltages of shape `p x m`.
+ G: Conductances of shape `m x n`.
+
+ Returns:
+ Output currents of shape `p x n`.
+ """
+ return tf.tensordot(V, G, axes=1)
diff --git a/awarememristor/crossbar/map.py b/awarememristor/crossbar/map.py
new file mode 100644
index 0000000..261d1d2
--- /dev/null
+++ b/awarememristor/crossbar/map.py
@@ -0,0 +1,145 @@
+import tensorflow as tf
+
+
+def I_to_y(I: tf.Tensor, k_V: float, max_weight: float, G_on: float, G_off: float) -> tf.Tensor:
+ """Convert output currents of a dot-product engine onto synaptic layer inputs.
+
+ Args:
+ I: Output currents of shape `p x 2n`
+ k_V: Voltage scaling factor.
+ max_weight: Assumed maximum weight.
+ G_on: Memristor conductance in ON state.
+ G_off: Memristor conductance in OFF state.
+
+ Returns:
+ Outputs of shape `p x n` of a synaptic layer implemented using memristive crossbars.
+ """
+ I_total = I[:, 0::2] - I[:, 1::2]
+ y = I_total_to_y(I_total, k_V, max_weight, G_on, G_off)
+ return y
+
+
+def I_total_to_y(
+ I_total: tf.Tensor, k_V: float, max_weight: float, G_on: float, G_off: float
+) -> tf.Tensor:
+ """Convert total output currents of a dot-product engine onto synaptic layer inputs.
+
+ Args:
+ I_total: Total output currents of shape `p x n`
+ k_V: Voltage scaling factor.
+ max_weight: Assumed maximum weight.
+ G_on: Memristor conductance in ON state.
+ G_off: Memristor conductance in OFF state.
+
+ Returns:
+ Outputs of shape `p x n` of a synaptic layer implemented using memristive crossbars.
+ """
+ k_G = _compute_k_G(max_weight, G_on, G_off)
+ k_I = _compute_k_I(k_V, k_G)
+ y = I_total / k_I
+ return y
+
+
+def _compute_k_G(max_weight: float, G_on: float, G_off: float) -> float:
+ """Compute conductance scaling factor.
+
+ Args:
+ max_weight: Assumed maximum weight.
+ G_on: Memristor conductance in ON state.
+ G_off: Memristor conductance in OFF state.
+
+ Returns:
+ Conductance scaling factor.
+ """
+ k_G = (G_on - G_off) / max_weight
+
+ return k_G
+
+
+def _compute_k_I(k_V: float, k_G: float) -> float:
+ """Compute current scaling factor.
+
+ Args:
+ k_V: Voltage scaling factor.
+ k_G: Conductance scaling factor.
+
+ Returns:
+ Current scaling factor.
+ """
+ return k_V * k_G
+
+
+def x_to_V(x: tf.Tensor, k_V: float) -> tf.Tensor:
+ """Map inputs (to a synaptic layer) onto voltages.
+
+ Args:
+ x: Synaptic inputs.
+ k_V: Voltage scaling factor.
+
+ Returns:
+ Voltages.
+ """
+ return k_V * x
+
+
+def double_w_to_G(double_w: tf.Tensor, G_off: float, G_on: float) -> tuple[tf.Tensor, tf.Tensor]:
+ """Map double weights onto conductances.
+
+ Args:
+ double_w: Double weights of shape `m x 2n`. These are used to train
+ each conductance directly (instead of training pairs of conductances).
+ G_off: Memristor conductance in OFF state.
+ G_on: Memristor conductance in ON state.
+
+ Returns:
+ G: Conductances of shape `m x 2n`.
+ max_weight: Assumed maximum weight.
+ """
+ max_weight = tf.math.reduce_max(double_w)
+ k_G = _compute_k_G(max_weight, G_on, G_off)
+ G = k_G * double_w + G_off
+
+ return G, max_weight
+
+
+@tf.function
+def w_to_G(
+ weights: tf.Tensor, G_off: float, G_on: float, mapping_rule: str = "default"
+) -> tuple[tf.Tensor, tf.Tensor]:
+ """Map weights onto conductances.
+
+ Args:
+ weights: Weights of shape `m x n`.
+ G_off: Memristor conductance in OFF state.
+ G_on: Memristor conductance in ON state.
+ mapping_rule: One of `("default", "avg")`.
+
+ Returns:
+ G: Conductances of shape `m x 2n`.
+ max_weight: Assumed maximum weight.
+ """
+ max_weight = tf.math.reduce_max(tf.math.abs(weights))
+
+ k_G = _compute_k_G(max_weight, G_on, G_off)
+ G_eff = k_G * weights
+
+ if mapping_rule == "default":
+ # We implement the pairs by choosing the lowest possible conductances.
+ G_pos = tf.math.maximum(G_eff, 0.0) + G_off
+ G_neg = -tf.math.minimum(G_eff, 0.0) + G_off
+ elif mapping_rule == "avg":
+ # We map the pairs symmetrically around `G_avg`.
+ G_avg = (G_off + G_on) / 2
+ G_pos = G_avg + 0.5 * G_eff
+ G_neg = G_avg - 0.5 * G_eff
+ else:
+ raise ValueError(f"Mapping rule {mapping_rule} is not recognized!")
+
+ # Columns 0, 2, ... (zero-based) are dedicated to positive weights;
+ # columns 1, 3, ... are dedicated to negative weights.
+ G = tf.reshape(
+ tf.concat([G_pos[..., tf.newaxis], G_neg[..., tf.newaxis]], axis=-1),
+ [tf.shape(G_pos)[0], -1],
+ )
+
+ return G, max_weight
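+
+# Illustrative sketch (hypothetical numbers): for `G_off = 1.0`, `G_on = 3.0`
+# and `weights = [[1.0, -0.5]]`, `max_weight` is 1.0 and `k_G = 2.0`, so the
+# default rule yields `G = [[3.0, 1.0, 1.0, 2.0]]`, where columns (0, 1)
+# encode the weight 1.0 and columns (2, 3) encode the weight -0.5.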
diff --git a/awarememristor/crossbar/nonidealities.py b/awarememristor/crossbar/nonidealities.py
new file mode 100644
index 0000000..1172cd4
--- /dev/null
+++ b/awarememristor/crossbar/nonidealities.py
@@ -0,0 +1,266 @@
+from abc import ABC, abstractmethod
+
+import numpy as np
+import tensorflow as tf
+import tensorflow_probability as tfp
+from KDEpy import bw_selection
+from tensorflow.python.ops.numpy_ops import np_config
+from tensorflow_probability import distributions as tfd
+
+from awarememristor.crossbar import utils
+
+
+class Nonideality(ABC):
+ """Physical effect that influences the behavior of memristive devices."""
+
+ @abstractmethod
+ def label(self) -> str:
+ """Returns nonideality label used in directory names, for example."""
+
+ def __eq__(self, other):
+ # `self` can never be `None` here, so only `other` needs checking.
+ if other is None:
+ return False
+ return self.label() == other.label()
+
+
+class LinearityPreserving(ABC):
+ """Nonideality whose effect can be simulated by disturbing the conductances."""
+
+ @abstractmethod
+ def disturb_G(self, G: tf.Tensor) -> tf.Tensor:
+ """Disturb conductances."""
+
+
+class LinearityNonpreserving(ABC):
+ """Nonideality in which nonlinearity manifests itself in individual devices
+ and the output current of a device is a function of its conductance
+ parameter and the voltage applied across it."""
+
+ @abstractmethod
+ def compute_I(self, V: tf.Tensor, G: tf.Tensor) -> tuple[tf.Tensor, tf.Tensor]:
+ """Compute currents in a crossbar suffering from linearity-nonpreserving nonideality.
+
+ Args:
+ V: Voltages of shape `p x m`.
+ G: Conductances of shape `m x n`.
+
+ Returns:
+ I: Output currents of shape `p x n`.
+ I_ind: Currents of shape `p x m x n` produced by each of the
+ conductances in the crossbar array.
+ """
+
+ @abstractmethod
+ def k_V(self) -> float:
+ """Return voltage scaling factor."""
+
+
+class IVNonlinearity(Nonideality, LinearityNonpreserving):
+ """Uses nonlinearity parameter to model deviations from ohmic I-V behavior.
+
+ Nonlinearity parameter `n` is defined as the current generated by a
+ resistive device at voltage `2*V` divided by the current generated by the
+ device at voltage `V`. We introduce voltage `V_ref` at which the generated
+ amount of current equals the expected amount described by Ohm's law, i.e.
+ `I = V*G`.
+ """
+
+ def __init__(self, V_ref: float, n_avg: float, n_std: float) -> None:
+ self.V_ref = V_ref
+ self.n_avg = n_avg
+ self.n_std = n_std
+
+ def label(self):
+ return f"IVNL:{self.n_avg:.3g}_{self.n_std:.3g}"
+
+ def compute_I(self, V, G):
+ # n <= 1 would produce unrealistic behaviour, while 1 < n < 2 is not typical in I-V curves
+ distribution = tfd.TruncatedNormal(
+ loc=self.n_avg,
+ scale=self.n_std,
+ low=2.0,
+ high=np.inf,
+ )
+ n = distribution.sample(sample_shape=G.get_shape().as_list())
+
+ sign = tf.expand_dims(tf.sign(V), axis=-1)
+ ohmic_current = sign * self.V_ref * tf.expand_dims(G, axis=0)
+ # Take absolute value of V to prevent negative numbers from being raised to
+ # a negative power. We assume symmetrical behaviour with negative voltages.
+ ratio = tf.expand_dims(tf.abs(V) / self.V_ref, axis=-1)
+ exponent = utils.tf_log2(n)
+
+ I_ind = ohmic_current * ratio ** exponent
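+ # Per device, this gives I(V) = sign(V) * V_ref * G * (|V| / V_ref) ** log2(n),
+ # so I(2V) / I(V) = 2 ** log2(n) = n for any V, consistent with the
+ # definition of the nonlinearity parameter.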
+
+ I = utils.add_I_BL(I_ind)
+
+ return I, I_ind
+
+ def k_V(self):
+ return 2 * self.V_ref
+
+
+class StuckAt(Nonideality, LinearityPreserving):
+ """Models a fraction of the devices as stuck in one conductance state."""
+
+ def __init__(self, value: float, probability: float) -> None:
+ """
+ Args:
+ value: Conductance value to set randomly selected devices to.
+ probability: Probability that a given device will be set to `value`.
+ Probability must be in the [0.0, 1.0] range.
+ """
+ self.value = value
+ self.probability = probability
+
+ def label(self):
+ return f"Stuck:{self.value:.3g}_{self.probability:.3g}"
+
+ def disturb_G(self, G):
+ mask = utils.random_bool_tensor(G.shape, self.probability)
+ G = tf.where(mask, self.value, G)
+ return G
+
+
+class StuckAtGOff(StuckAt):
+ """Models a fraction of the devices as stuck at `G_off`."""
+
+ def __init__(self, G_off: float, probability: float) -> None:
+ StuckAt.__init__(self, G_off, probability)
+
+ def label(self):
+ return f"StuckOff:{self.probability:.3g}"
+
+
+class StuckAtGOn(StuckAt):
+ """Models a fraction of the devices as stuck at `G_on`."""
+
+ def __init__(self, G_on: float, probability: float) -> None:
+ StuckAt.__init__(self, G_on, probability)
+
+ def label(self):
+ return f"StuckOn:{self.probability:.3g}"
+
+
+class StuckDistribution(Nonideality, LinearityPreserving):
+ """Models a fraction of the devices as stuck at conductance states drawn
+ from a random distribution.
+
+ Kernel density estimation (KDE) with truncated normal distributions is
+ constructed using a list of conductance values at which the devices got
+ stuck.
+ """
+
+ def __init__(
+ self, means: list[float], probability: float, bandwidth_def=bw_selection.scotts_rule
+ ):
+ """
+ Args:
+ means: Means of underlying normal distributions.
+ probability: Probability that a given device will get stuck.
+ bandwidth_def: Function used to determine the bandwidth parameter of KDE.
+ """
+ bandwidth = bandwidth_def(np.reshape(means, (len(means), 1)))
+ self.probability = probability
+ self.bandwidth = bandwidth
+ self.distribution = self._kde(means, bandwidth)
+
+ def label(self) -> str:
+ return f"StuckDistr:{self.probability:.3g}_{self.bandwidth:.3g}"
+
+ @staticmethod
+ def _kde(means: list[float], bandwidth: float) -> tfd.Distribution:
+ """Kernel density estimation.
+
+ Args:
+ means: Means of underlying normal distributions.
+ bandwidth: Standard deviation of underlying normal distributions.
+
+ Returns:
+ KDE distribution.
+ """
+ weights = []
+ distr_means = []
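+ # The KDE kernels are normals truncated at zero (conductances cannot be
+ # negative); probability mass that would fall below zero is compensated
+ # by a mirrored kernel at `-mean` whenever that mass is non-negligible.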
+ for mean in means:
+ distr_means.append(mean)
+ prob_neg = tfd.Normal(loc=mean, scale=bandwidth).cdf(0.0)
+ # To ensure numerical stability, only include reflection if it will
+ # have a non-negligible effect.
+ if prob_neg > 1e-8:
+ distr_means.append(-mean)
+ prob_pos = 1.0 - prob_neg
+ weights.extend([prob_pos, prob_neg])
+ else:
+ weights.append(1.0)
+
+ np_config.enable_numpy_behavior()
+ distr_means_32 = tf.constant(distr_means)
+ bandwidth_32 = tf.constant(bandwidth)
+
+ kde_distribution = tfd.MixtureSameFamily(
+ tfd.Categorical(probs=weights),
+ tfd.TruncatedNormal(
+ loc=distr_means_32.astype(tf.float32),
+ scale=bandwidth_32.astype(tf.float32),
+ low=0.0,
+ high=np.inf,
+ ),
+ )
+
+ return kde_distribution
+
+ def disturb_G(self, G):
+ mask = utils.random_bool_tensor(G.shape, self.probability)
+ idxs = tf.where(mask)
+ zeroed_G = tf.where(mask, 0.0, G)
+ stuck_G = self.distribution.sample(tf.math.count_nonzero(mask))
+ disturbed_G = zeroed_G + tf.scatter_nd(idxs, stuck_G, G.shape)
+ return disturbed_G
+
+
+class D2DLognormal(Nonideality, LinearityPreserving):
+ """Models D2D programming variability as lognormal deviations of resistances."""
+
+ def __init__(
+ self, G_off: float, G_on: float, R_on_log_std: float, R_off_log_std: float
+ ) -> None:
+ """
+ Args:
+ G_off: Memristor conductance in OFF state.
+ G_on: Memristor conductance in ON state.
+ R_on_log_std: Standard deviation of the (lognormal distribution's) underlying normal
+ distribution associated with R_on (i.e. 1/G_on).
+ R_off_log_std: Standard deviation of the (lognormal distribution's) underlying normal
+ distribution associated with R_off (i.e. 1/G_off).
+ """
+ self.G_off = G_off
+ self.G_on = G_on
+ self.R_on_log_std = R_on_log_std
+ self.R_off_log_std = R_off_log_std
+
+ def label(self):
+ return f"D2DLN:{self.R_on_log_std:.3g}_{self.R_off_log_std:.3g}"
+
+ def disturb_G(self, G):
+ R = 1 / G
+ R_on = 1 / self.G_on
+ R_off = 1 / self.G_off
+
+ # Piece-wise linear interpolation.
+ log_std_ref = [self.R_on_log_std, self.R_off_log_std]
+ log_std = tfp.math.interp_regular_1d_grid(R, R_on, R_off, log_std_ref)
+
+ # Lognormal modelling.
+ R_squared = tf.math.pow(R, 2)
+ log_var = tf.math.pow(log_std, 2)
+ # Because $\sigma^2 = \ln ( 1 + \frac{\sigma_X^2}{\mu_X^2} )$,
+ # $\sigma_X^2 = \mu_X^2 (e^{\sigma^2} - 1)$.
+ R_var = R_squared * (tf.math.exp(log_var) - 1.0)
+ log_mu = tf.math.log(R_squared / tf.math.sqrt(R_squared + R_var))
+ R = tfd.LogNormal(log_mu, log_std, validate_args=True).sample()
+
+ G = 1 / R
+
+ return G
diff --git a/awarememristor/crossbar/utils.py b/awarememristor/crossbar/utils.py
new file mode 100644
index 0000000..4bef9a6
--- /dev/null
+++ b/awarememristor/crossbar/utils.py
@@ -0,0 +1,38 @@
+import tensorflow as tf
+
+
+def tf_log2(x: tf.Tensor) -> tf.Tensor:
+ """Compute logarithm of base 2 of `x`.
+
+ Args:
+ x: An array.
+ """
+ numerator = tf.math.log(x)
+ denominator = tf.math.log(tf.constant(2, dtype=numerator.dtype))
+ return numerator / denominator
+
+
+def add_I_BL(I_ind: tf.Tensor) -> tf.Tensor:
+ """Add currents along the bit lines.
+
+ Args:
+ I_ind: Currents of shape `p x m x n` produced by each of the conductances in the crossbar
+ array.
+
+ Returns:
+ Output currents of shape `p x n`.
+ """
+ I = tf.math.reduce_sum(I_ind, axis=1)
+ return I
+
+
+def random_bool_tensor(shape: list[int], prob_true: float) -> tf.Tensor:
+ """Return random boolean tensor.
+
+ Args:
+ shape: Tensor shape.
+ prob_true: Probability that a given entry is going to be True. Probability must be in the
+ [0.0, 1.0] range.
+ """
+ random_float_tensor = tf.random.uniform(shape, minval=0, maxval=1, dtype=tf.dtypes.float64)
+ return random_float_tensor < prob_true
diff --git a/awarememristor/plotting/__init__.py b/awarememristor/plotting/__init__.py
new file mode 100644
index 0000000..5723f87
--- /dev/null
+++ b/awarememristor/plotting/__init__.py
@@ -0,0 +1 @@
+from awarememristor.plotting import figures, supporting_figures
diff --git a/awarememristor/plotting/figures.py b/awarememristor/plotting/figures.py
new file mode 100644
index 0000000..1452e4e
--- /dev/null
+++ b/awarememristor/plotting/figures.py
@@ -0,0 +1,459 @@
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from matplotlib import rc
+from matplotlib.lines import Line2D
+
+from awarememristor import crossbar, simulations
+from awarememristor.crossbar.nonidealities import StuckDistribution
+from awarememristor.plotting import utils
+from awarememristor.training import architecture
+
+
+def _SiO_x_panels(fig, axes):
+ data = simulations.data.load_SiO_x_multistate()
+
+ N = 1000
+ palette = plt.cm.inferno(np.linspace(0, 1, N))
+ min_voltage, max_voltage = 0.0, 0.5
+
+ curves = simulations.data.low_high_n_SiO_x_curves(data)
+ for axis, (voltages, currents) in zip(axes, curves):
+ for idx in range(voltages.shape[0]):
+ voltage_curve = voltages[idx, :]
+ current_curve = currents[idx, :]
+ n = simulations.data.nonlinearity_parameter(current_curve)
+ palette_idx = min(max(int(np.floor(N * (n - 2) / 2)), 0), N - 1)  # clamp to valid palette indices
+ axis.plot(
+ voltage_curve,
+ current_curve,
+ color=palette[palette_idx],
+ linewidth=utils.Config.LINEWIDTH,
+ )
+
+ axis.set_xlim([min_voltage, max_voltage])
+ axis.set_ylim(bottom=0)
+ axis.set_xlabel(utils.axis_label("voltage"))
+ axis.ticklabel_format(axis="y", scilimits=(-1, 1))
+ axis.yaxis.get_offset_text().set_fontsize(utils.Config.TICKS_FONT_SIZE)
+
+ sm = plt.cm.ScalarMappable(cmap="inferno", norm=plt.Normalize(vmin=2, vmax=4))
+ cbar = fig.colorbar(sm, ax=axes)
+ cbar.set_label(
+ label=utils.axis_label("nonlinearity-parameter"),
+ fontsize=utils.Config.AXIS_LABEL_FONT_SIZE,
+ rotation=-90,
+ va="bottom",
+ )
+ cbar.ax.tick_params(axis="both", which="both", labelsize=utils.Config.TICKS_FONT_SIZE)
+
+ axes[0].set_ylabel(utils.axis_label("current"))
+
+
+def _HfO2_panels(fig, axes):
+ data = simulations.data.load_Ta_HfO2()
+ G_min, G_max = simulations.data.extract_G_off_and_G_on(data)
+ vals, p = simulations.data.extract_stuck(data, G_min, G_max)
+ median_range = G_max - G_min
+ colors = utils.color_dict()
+
+ axis = axes[0]
+ shape = data.shape
+ num_pulses = shape[0] * shape[1]
+ num_bl = shape[2]
+ num_wl = shape[3]
+ pulsing_step_size = 10
+ random_proportion = 0.01
+ x = [i + 1 for i in range(0, num_pulses, pulsing_step_size)]
+ data = np.reshape(data, (num_pulses, num_bl, num_wl))
+ num_devices = num_wl * num_bl
+ num_reduced_devices = int(np.around(random_proportion * num_devices))
+ np.random.seed(0)
+ random_idxs = np.random.choice(num_devices, num_reduced_devices)
+ bl_idxs, wl_idxs = np.unravel_index(random_idxs, (num_bl, num_wl))
+ for bl_idx, wl_idx in zip(bl_idxs, wl_idxs):
+ curve_data = data[:, bl_idx, wl_idx]
+ if np.max(curve_data) - np.min(curve_data) < simulations.data.stuck_device_threshold(
+ median_range
+ ):
+ color = colors["vermilion"]
+ else:
+ color = colors["bluish-green"]
+ y = curve_data[::pulsing_step_size]
+ axis.plot(x, 1000 * y, color=color, lw=utils.Config.LINEWIDTH / 2, alpha=1 / 2)
+
+ for G in [G_min, G_max]:
+ axis.axhline(
+ 1000 * G,
+ 0,
+ 1,
+ color=colors["blue"],
+ lw=utils.Config.LINEWIDTH,
+ linestyle="dashed",
+ zorder=10,
+ )
+
+ axis.set_xlabel(utils.axis_label("pulse-number"))
+ axis.set_ylabel(utils.axis_label("conductance", unit_prefix="m"))
+
+ axis.set_xlim([0, x[-1]])
+ axis.set_ylim(bottom=0.0)
+
+ handles = [
+ Line2D([0], [0], color=colors["vermilion"], label="Stuck devices"),
+ Line2D([0], [0], color=colors["bluish-green"], label="Other devices"),
+ Line2D(
+ [0],
+ [0],
+ color=colors["blue"],
+ linestyle="dashed",
+ label=r"$G_\mathrm{off}, G_\mathrm{on}$",
+ ),
+ ]
+
+ # Distribution
+ axis = axes[1]
+ distribution = StuckDistribution(vals, p).distribution
+ x = np.linspace(0.0, 1.5e-3, int(1e4))
+ y = distribution.prob(x)
+ y = y / 1000
+ x = 1000 * x
+
+ axis.plot(y, x, lw=0.5, color=colors["vermilion"])
+ axis.scatter(
+ np.zeros_like(vals),
+ [1000 * val for val in vals],
+ marker="_",
+ alpha=0.1,
+ lw=utils.Config.LINEWIDTH / 2,
+ color=colors["vermilion"],
+ )
+ for G in [G_min, G_max]:
+ axis.axhline(
+ 1000 * G,
+ 0,
+ 1,
+ color=colors["blue"],
+ lw=utils.Config.LINEWIDTH,
+ linestyle="dashed",
+ zorder=10,
+ )
+
+ axis.set_xlabel(r"Probability density ($\mathrm{mS}^{-1}$)")
+
+ axis.set_xlim(left=0.0)
+
+ utils.add_legend(
+ fig,
+ ncol=3,
+ bbox_to_anchor=(0.5, 0.515),
+ handles=handles,
+ )
+
+
+def experimental_data():
+ fig = plt.figure(constrained_layout=True)
+ gs = fig.add_gridspec(3, 1, height_ratios=[1.0, 0.08, 1.1])
+
+ gs_top = gs[0].subgridspec(1, 2, wspace=0.03)
+ gs_bottom = gs[2].subgridspec(1, 2, wspace=0.03)
+
+ subplots = list(gs_top) + list(gs_bottom)
+ for subplot in subplots:
+ fig.add_subplot(subplot)
+
+ fig, axes = utils.fig_init(2, 0.9, custom_fig=fig)
+
+ axes[1].sharex(axes[0])
+ axes[3].sharey(axes[2])
+ axes[3].label_outer()
+
+ _SiO_x_panels(fig, axes[[0, 1]])
+ _HfO2_panels(fig, axes[[2, 3]])
+
+ utils.save_fig(fig, "experimental-data")
+
+
+def iv_nonlinearity_training(metric="error"):
+ fig, axes = utils.fig_init(2, 0.55, fig_shape=(2, 3), sharex=True, sharey=True)
+
+ iterators = simulations.iv_nonlinearity.get_iterators()
+ # Same training, different inference.
+ iterators.insert(3, iterators[0])
+ inference_idxs = [0, 0, 0, 1, 0, 0]
+
+ for idx, (iterator, inference_idx) in enumerate(zip(iterators, inference_idxs)):
+ i, j = np.unravel_index(idx, axes.shape)
+ axis = axes[i, j]
+ utils.plot_training_curves(axis, iterator, metric=metric, inference_idx=inference_idx)
+ if i + 1 == axes.shape[0]:
+ axis.set_xlabel(utils.axis_label("epoch"))
+ if j == 0:
+ axis.set_ylabel(utils.axis_label(metric))
+
+ utils.add_legend(
+ fig,
+ labels=["Training", "Validation", "Test (nonideal)"],
+ ncol=axes.shape[1],
+ bbox_to_anchor=(0.5, 1.03),
+ )
+
+ utils.save_fig(fig, "iv-nonlinearity-training", metric=metric)
+
+
+def iv_nonlinearity_inference(metric="error"):
+ fig, axes = utils.fig_init(1, 0.8)
+
+ iterators = simulations.iv_nonlinearity.get_iterators()
+ # Same training, different inference.
+ iterators.insert(3, iterators[0])
+ inference_idxs = [0, 0, 0, 1, 0, 0]
+
+ colors = [utils.color_dict()[key] for key in ["vermilion", "blue", "bluish-green"]]
+
+ boxplots = []
+
+ for idx, (iterator, inference_idx) in enumerate(zip(iterators, inference_idxs)):
+ avg_power = iterator.test_metric("avg_power", inference_idx=inference_idx)
+ y = iterator.test_metric(metric, inference_idx=inference_idx)
+ color = colors[idx % 3]
+ boxplot = utils.plot_boxplot(
+ axes, y, color, metric=metric, x=avg_power, is_x_log=True, linear_width=0.2
+ )
+ boxplots.append(boxplot)
+
+ utils.add_boxplot_legend(
+ axes, boxplots, ["Standard", "Nonideality-aware", "Nonideality-aware (regularized)"]
+ )
+
+ plt.xlabel(utils.axis_label("power-consumption"))
+ plt.ylabel(utils.axis_label(metric))
+
+ utils.save_fig(fig, "iv-nonlinearity-inference", metric=metric)
+
+
+def iv_nonlinearity_cnn(metric="error"):
+ fig, axes = utils.fig_init(2, 1 / 3, fig_shape=(1, 3), sharey=True)
+
+ colors = utils.color_dict()
+
+ iterators = simulations.iv_nonlinearity_cnn.get_iterators()
+
+ axes[0].set_ylabel(utils.axis_label(metric))
+
+ # Error curves.
+ for axis, iterator in zip(axes, iterators):
+ utils.plot_training_curves(axis, iterator, metric=metric)
+ axis.set_xlabel(utils.axis_label("epoch"))
+
+ # Box plots.
+ axis = axes[2]
+ for idx, (iterator, color) in enumerate(zip(iterators, [colors["vermilion"], colors["blue"]])):
+ y = iterator.test_metric(metric)
+ _ = utils.plot_boxplot(axis, y, color, x=idx, metric=metric, linewidth_scaling=2 / 3)
+ axis.set_xticks([0, 1])
+ axis.set_xticklabels(["Standard", "Nonideality-aware"])
+
+ axis.set_xlabel("Training")
+ axis.set_ylim(top=95.0)
+
+ utils.add_legend(
+ fig,
+ labels=["Training", "Validation", "Test (nonideal)"],
+ ncol=len(axes),
+ bbox_to_anchor=(0.35, 1.05),
+ )
+
+ utils.save_fig(fig, "iv-nonlinearity-cnn", metric=metric)
+
+
+def weight_implementation(metric="error"):
+ fig = plt.figure(constrained_layout=True)
+ gs = fig.add_gridspec(2, 1, height_ratios=[1.0, 0.8])
+
+ gs_top = gs[0].subgridspec(2, 4)
+ gs_bottom = gs[1].subgridspec(1, 2)
+
+ subplots = list(gs_top) + list(gs_bottom)
+ for subplot in subplots:
+ fig.add_subplot(subplot)
+
+ fig, axes = utils.fig_init(2, 1.0, custom_fig=fig)
+
+ for axis in axes[:8]:
+ axis.sharex(axes[4])
+ axis.sharey(axes[0])
+ axis.label_outer()
+ axis.set_aspect("equal", adjustable="box")
+
+ iterators = simulations.weight_implementation.get_iterators()[1:]
+ colors = [
+ utils.color_dict()[key] for key in ["reddish-purple", "vermilion", "blue", "bluish-green"]
+ ]
+
+ temp_iterators = [iterators[idx] for idx in [0, 1, 4, 5, 2, 3, 6, 7]]
+ for idx, (axis, iterator, color) in enumerate(zip(axes, temp_iterators, colors + colors)):
+ iterator.is_training = False
+ iterator.inference_idx = 0
+ model = architecture.get_model(iterator, custom_weights_path=iterator.weights_path())
+ weights = model.layers[1].combined_weights()
+
+ inference = iterator.current_stage()
+ G_off = inference.G_off
+ G_on = inference.G_on
+
+ if iterator.training.uses_double_weights():
+ G, _ = crossbar.map.double_w_to_G(weights, G_off, G_on)
+ else:
+ G, _ = crossbar.map.w_to_G(weights, G_off, G_on, mapping_rule=inference.mapping_rule)
+
+ G = 1e6 * G
+ utils.plot_scatter(axis, G[:, ::2], G[:, 1::2], color, random_proportion=0.1)
+
+ axis.xaxis.set_ticks(np.arange(1.0, 3.0, 0.5))
+ axis.yaxis.set_ticks(np.arange(1.0, 3.0, 0.5))
+
+ if idx > 3:
+ axis.set_xlabel(utils.axis_label("g-plus", unit_prefix="μ"))
+ if idx in [0, 4]:
+ axis.set_ylabel(utils.axis_label("g-minus", unit_prefix="μ"))
+
+ for iterator_idxs, axis in zip([[0, 1, 4, 5], [2, 3, 6, 7]], axes[-2:]):
+ for iterator_idx, color in zip(iterator_idxs, colors):
+ iterator = iterators[iterator_idx]
+ avg_power = iterator.test_metric("avg_power")
+ y = iterator.test_metric(metric)
+ utils.plot_boxplot(axis, y, color, x=1000 * avg_power, metric=metric, linear_width=0.2)
+ axis.set_xlabel(utils.axis_label("power-consumption", unit_prefix="m"))
+
+ axes[-2].set_ylabel(utils.axis_label(metric))
+ axes[-1].sharey(axes[-2])
+ axes[-1].sharex(axes[-2])
+ axes[-1].label_outer()
+
+ utils.save_fig(fig, "weight-implementation", metric=metric)
+
+
+def memristive_validation(metric="error"):
+ fig, axes = utils.fig_init(2, 1 / 3, fig_shape=(1, 3), sharey=True)
+
+ iterator = simulations.memristive_validation.get_nonideal_iterators()[0]
+
+ axes[0].set_ylabel(utils.axis_label(metric))
+
+ # Curves
+ for idx, (standard_mode, axis) in enumerate(zip([True, False], axes)):
+ iterator.training.is_standard_validation_mode = standard_mode
+ utils.plot_training_curves(axis, iterator, metric=metric)
+ axis.set_xlabel(utils.axis_label("epoch"))
+
+ # Box plots
+ axis = axes[-1]
+ colors = [utils.color_dict()[key] for key in ["vermilion", "blue"]]
+
+ for idx, (standard_mode, color) in enumerate(zip([True, False], colors)):
+ iterator.training.is_standard_validation_mode = standard_mode
+ y = iterator.test_metric(metric)
+ _ = utils.plot_boxplot(axis, y, color, x=idx, metric=metric, linewidth_scaling=2 / 3)
+
+ axis.set_xticks([0, 1])
+ axis.set_xticklabels(["Standard", "Memristive"])
+ axis.set_xlabel(utils.axis_label("checkpoint"))
+
+ utils.add_legend(
+ fig,
+ labels=["Training", "Validation", "Test (nonideal)"],
+ ncol=len(axes),
+ bbox_to_anchor=(0.35, 1.05),
+ )
+
+ utils.save_fig(fig, "memristive-validation", metric=metric)
+
+
+def nonideality_agnosticism(metric: str = "error", norm_rows=True, include_val_label=False):
+ training_labels = {
+ "nonreg__64__none_none__ideal": "Ideal",
+ "nonreg__64__0.000997_0.00351__IVNL:2.13_0.0953": r"Low $I$-$V$ nonlin. [$\mathrm{SiO}_x$]",
+ "reg__64__0.000997_0.00351__IVNL:2.13_0.0953": r"Low $I$-$V$ nonlin. [$\mathrm{SiO}_x$] (reg.)",
+ "nonreg__64__7.72e-07_2.73e-06__IVNL:2.99_0.369": r"High $I$-$V$ nonlin. [$\mathrm{SiO}_x$]",
+ "reg__64__7.72e-07_2.73e-06__IVNL:2.99_0.369": r"High $I$-$V$ nonlin. [$\mathrm{SiO}_x$] (reg.)",
+ "nonreg__64__7.72e-07_2.73e-06__StuckOff:0.05": r"Stuck at $G_\mathrm{off}$",
+ "nonreg__64__4.36e-05_0.000978__StuckDistr:0.101_1.77e-05": r"Stuck [$\mathrm{Ta/HfO}_2$]",
+ "nonreg__64__7.72e-07_2.73e-06__D2DLN:0.25_0.25": "More uniform D2D var.",
+ "reg__64__7.72e-07_2.73e-06__D2DLN:0.25_0.25": "More uniform D2D var. (reg.)",
+ "nonreg__64__7.72e-07_2.73e-06__D2DLN:0.05_0.5": "Less uniform D2D var.",
+ "reg__64__7.72e-07_2.73e-06__D2DLN:0.05_0.5": "Less uniform D2D var. (reg.)",
+ "nonreg__64__7.72e-07_2.73e-06__IVNL:2.99_0.369+StuckOn:0.05": r"High $I$-$V$ nonlin. [$\mathrm{SiO}_x$] + stuck at $G_\mathrm{on}$",
+ "nonreg__64__7.72e-07_2.73e-06__D2DLN:0.5_0.5": "High D2D var.",
+ }
+ inference_labels = {
+ "none_none__ideal": training_labels["nonreg__64__none_none__ideal"],
+ "0.000997_0.00351__IVNL:2.13_0.0953": training_labels[
+ "nonreg__64__0.000997_0.00351__IVNL:2.13_0.0953"
+ ],
+ "7.72e-07_2.73e-06__IVNL:2.99_0.369": training_labels[
+ "nonreg__64__7.72e-07_2.73e-06__IVNL:2.99_0.369"
+ ],
+ "7.72e-07_2.73e-06__StuckOff:0.05": training_labels[
+ "nonreg__64__7.72e-07_2.73e-06__StuckOff:0.05"
+ ],
+ "4.36e-05_0.000978__StuckDistr:0.101_1.77e-05": training_labels[
+ "nonreg__64__4.36e-05_0.000978__StuckDistr:0.101_1.77e-05"
+ ],
+ "7.72e-07_2.73e-06__D2DLN:0.25_0.25": training_labels[
+ "nonreg__64__7.72e-07_2.73e-06__D2DLN:0.25_0.25"
+ ],
+ "7.72e-07_2.73e-06__D2DLN:0.05_0.5": training_labels[
+ "nonreg__64__7.72e-07_2.73e-06__D2DLN:0.05_0.5"
+ ],
+ "7.72e-07_2.73e-06__IVNL:2.99_0.369+StuckOn:0.05": training_labels[
+ "nonreg__64__7.72e-07_2.73e-06__IVNL:2.99_0.369+StuckOn:0.05"
+ ],
+ "7.72e-07_2.73e-06__D2DLN:0.5_0.5": training_labels[
+ "nonreg__64__7.72e-07_2.73e-06__D2DLN:0.5_0.5"
+ ],
+ }
+ df = pd.DataFrame(
+ columns=[training_labels[key] for key in training_labels],
+ index=[inference_labels[key] for key in inference_labels],
+ )
+ df = df.astype(float)
+ iterators = simulations.nonideality_agnosticism.get_iterators()
+ for iterator in iterators:
+ training_label = training_labels[iterator.training.label()]
+ ys = [
+ iterator.test_metric(metric, inference_idx=idx)
+ for idx in range(len(iterator.inferences))
+ ]
+ for inference, y in zip(iterator.inferences, ys):
+ inference_label = inference_labels[inference.label()]
+ df.at[inference_label, training_label] = np.median(y)
+
+ fig, axes = utils.fig_init(2, 0.5)
+
+ filename = "nonideality-agnosticism"
+ if not norm_rows:
+ filename += "-not-norm"
+ if include_val_label:
+ filename += "-with-val-label"
+
+ utils.add_heatmap(
+ fig, axes, df, x_ticks=df.columns, y_ticks=df.index, metric=metric, norm_rows=norm_rows
+ )
+
+ axes.set_ylabel(utils.axis_label("inference"))
+ axes.set_xlabel(utils.axis_label("training"))
+
+ if include_val_label:
+ axes.text(
+ 1.05,
+ 0.5,
+ utils.axis_label("error", prepend="median"),
+ horizontalalignment="center",
+ verticalalignment="center",
+ rotation=-90,
+ transform=axes.transAxes,
+ fontsize=utils.Config.AXIS_LABEL_FONT_SIZE,
+ )
+
+ utils.save_fig(fig, filename, metric=metric)
diff --git a/awarememristor/plotting/supporting_figures.py b/awarememristor/plotting/supporting_figures.py
new file mode 100644
index 0000000..d49889d
--- /dev/null
+++ b/awarememristor/plotting/supporting_figures.py
@@ -0,0 +1,236 @@
+import matplotlib.pyplot as plt
+import numpy as np
+
+from awarememristor import simulations
+from awarememristor.plotting import utils
+
+
+def all_iv_curves_full_range():
+ fig, axes = utils.fig_init(2, 0.6, fig_shape=(1, 1))
+
+ data = simulations.data.load_SiO_x_multistate()
+ voltages, currents = simulations.data.all_SiO_x_curves(data, max_voltage=5.0)
+
+ colors = utils.color_list()[:-1]
+ num_colors = len(colors)
+ for idx in range(voltages.shape[0]):
+ voltage_curve = voltages[idx, :]
+ current_curve = currents[idx, :]
+ color_idx = idx % num_colors
+ axes.plot(
+ voltage_curve,
+ current_curve,
+ linewidth=utils.Config.LINEWIDTH,
+ color=colors[color_idx],
+ )
+
+ axes.set_xlim(left=0.0)
+ axes.set_xlabel(utils.axis_label("voltage"))
+ axes.yaxis.get_offset_text().set_fontsize(utils.Config.TICKS_FONT_SIZE)
+
+ axes.set_ylabel(utils.axis_label("current"))
+ axes.set_yscale("log")
+
+ utils.save_fig(fig, "all-SiO_x-IV-curves-full-range", is_supporting=True)
+
+
+def switching():
+ fig, axes = utils.fig_init(2, 0.5, fig_shape=(1, 1))
+
+ data = simulations.data.load_SiO_x_switching()
+ data[:, 0, :] = np.abs(data[:, 0, :])
+
+ colors = [utils.color_dict()[color_name] for color_name in ["blue", "orange"]]
+ labels = ["SET", "RESET"]
+ labels_x = [0.15, 1 - 0.15]
+ for idx, (color, label, label_x) in enumerate(zip(colors, labels, labels_x)):
+ voltage_curve = data[:, 1, idx]
+        current_curve = data[:, 0, idx]  # Absolute values already taken above.
+ line = axes.plot(
+ voltage_curve,
+ current_curve,
+ linewidth=utils.Config.LINEWIDTH,
+ color=color,
+ )
+ utils.add_arrow(line[0], 60)
+ utils.add_arrow(line[0], -60)
+ utils.add_text(
+ axes,
+ label,
+ (label_x, 0.88),
+ fontsize=utils.Config.TEXT_LABEL_SIZE,
+ color=color,
+ )
+
+ axes.set_xlabel(utils.axis_label("voltage"))
+
+ axes.set_ylabel(utils.axis_label("current", prepend="absolute"))
+ axes.set_ylim(bottom=5e-8, top=5e-3)
+ axes.set_yscale("log")
+
+ utils.save_fig(fig, "SiO_x-switching", is_supporting=True)
+
+
+def _training_curves_multiple_panels(
+ width_num_cols,
+ height_frac,
+ fig_shape,
+ iterators,
+ metric,
+ figure_name,
+ inference_idxs=None,
+ y_lim=None,
+):
+ fig, axes = utils.fig_init(
+ width_num_cols, height_frac, fig_shape=fig_shape, sharex=True, sharey=True
+ )
+ if inference_idxs is None:
+ inference_idxs = [0 for _ in range(len(iterators))]
+
+ for training_idx, linestyle in enumerate(utils.get_linestyles()):
+ for i in range(len(iterators)):
+ iterators[i].training.repeat_idx = training_idx
+
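+        # `axes` may be 1-D or 2-D depending on `fig_shape`; locate each panel accordingly.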
+ for idx, (iterator, inference_idx) in enumerate(zip(iterators, inference_idxs)):
+ if len(axes.shape) == 1:
+ axis = axes[idx]
+ else:
+ i, j = np.unravel_index(idx, axes.shape)
+ axis = axes[i, j]
+ utils.plot_training_curves(
+ axis,
+ iterator,
+ metric=metric,
+ inference_idx=inference_idx,
+ linestyle=linestyle,
+ is_many=True,
+ )
+ if len(axes.shape) == 1:
+ axis.set_xlabel(utils.axis_label("epoch"))
+ if idx == 0:
+ axis.set_ylabel(utils.axis_label(metric))
+ else:
+ if i + 1 == axes.shape[0]:
+ axis.set_xlabel(utils.axis_label("epoch"))
+ if j == 0:
+ axis.set_ylabel(utils.axis_label(metric))
+ if y_lim is not None:
+ axis.set_ylim(top=y_lim)
+
+ utils.add_legend(
+ fig,
+ labels=["Training", "Validation", "Test (nonideal)"],
+ ncol=3,
+ bbox_to_anchor=(0.5, 1.03),
+ )
+
+ utils.save_fig(fig, f"{figure_name}-training", is_supporting=True, metric=metric)
+
+
+def iv_nonlinearity_training(metric="error"):
+ iterators = simulations.iv_nonlinearity.get_iterators()
+ # Same training, different inference.
+ iterators.insert(3, iterators[0])
+ inference_idxs = [0, 0, 0, 1, 0, 0]
+
+ _training_curves_multiple_panels(
+ 2,
+ 0.55,
+ (2, 3),
+ iterators,
+ metric,
+ "iv-nonlinearity",
+ inference_idxs=inference_idxs,
+ )
+
+
+def weight_implementation_standard_weights_training(metric="error"):
+ iterators = simulations.weight_implementation.get_nonideal_iterators()[:4]
+
+ _training_curves_multiple_panels(
+ (2, 3),
+ 0.77,
+ (2, 2),
+ iterators,
+ metric,
+ "weight-implementation-standard-weights",
+ )
+
+
+def weight_implementation_double_weights_training(metric="error"):
+ iterators = [
+ simulations.weight_implementation.get_ideal_iterator()
+ ] + simulations.weight_implementation.get_nonideal_iterators()[4:]
+ # Same training, different inference.
+ iterators.insert(3, iterators[0])
+ inference_idxs = [0, 0, 0, 1, 0, 0]
+
+ _training_curves_multiple_panels(
+ 2,
+ 0.55,
+ (2, 3),
+ iterators,
+ metric,
+ "weight-implementation-double-weights",
+ inference_idxs=inference_idxs,
+ y_lim=95,
+ )
+
+
+def memristive_validation_training(metric="error"):
+ iterators = [
+ *simulations.memristive_validation.get_iterators(),
+ simulations.memristive_validation.get_iterators()[1],
+ ]
+ iterators[1].training.is_standard_validation_mode = True
+
+ # Only first five will be plotted.
+ _training_curves_multiple_panels(
+ 2,
+ 0.32,
+ (1, 3),
+ iterators,
+ metric,
+ "memristive-validation",
+ y_lim=95,
+ )
+
+
+def stuck_off_training(metric="error"):
+ iterators = simulations.stuck_off.get_iterators()
+
+ _training_curves_multiple_panels(
+ (2, 3),
+ 0.45,
+ (1, 2),
+ iterators,
+ metric,
+ "stuck-off",
+ )
+
+
+def high_iv_nonlinearity_and_stuck_on_training(metric="error"):
+ iterators = simulations.iv_nonlinearity_and_stuck_on.get_iterators()
+
+ _training_curves_multiple_panels(
+ (2, 3),
+ 0.45,
+ (1, 2),
+ iterators,
+ metric,
+ "high-iv-nonlinearity-and-stuck-on",
+ )
+
+
+def stuck_distribution_training(metric="error"):
+ iterators = simulations.stuck_distribution.get_iterators()
+
+ _training_curves_multiple_panels(
+ (2, 3),
+ 0.45,
+ (1, 2),
+ iterators,
+ metric,
+ "stuck-distribution",
+ y_lim=95,
+ )
diff --git a/awarememristor/plotting/utils.py b/awarememristor/plotting/utils.py
new file mode 100644
index 0000000..5c17f95
--- /dev/null
+++ b/awarememristor/plotting/utils.py
@@ -0,0 +1,550 @@
+import copy
+import os
+from typing import Optional, Union
+
+import matplotlib
+import matplotlib.pyplot as plt
+import matplotlib.transforms as mtransforms
+import numpy as np
+from numpy import ma
+
+
+def _cm_to_in(length: float) -> float:
+ return length / 2.54
+
+
+class Config:
+ AXIS_LABEL_FONT_SIZE: float = 12
+ LEGEND_FONT_SIZE: float = 8
+ TICKS_FONT_SIZE: float = 8
+ SUBPLOT_LABEL_SIZE: float = 12
+ TEXT_LABEL_SIZE: float = 10
+ LINEWIDTH: float = 0.75
+ MARKER_SIZE: float = 0.5
+ BOXPLOT_LINEWIDTH: float = 0.75
+ # Advanced Science
+ ONE_COLUMN_WIDTH: float = _cm_to_in(8.5)
+ TWO_COLUMNS_WIDTH: float = _cm_to_in(17.8)
+ TWO_THIRDS_COLUMN_WIDTH: float = 2 / 3 * TWO_COLUMNS_WIDTH
+ COL_WIDTHS: dict[Union[int, tuple[int, int]], float] = {
+ 1: ONE_COLUMN_WIDTH,
+ 2: TWO_COLUMNS_WIDTH,
+ (2, 3): TWO_THIRDS_COLUMN_WIDTH,
+ }
+
+
+def color_list() -> list[str]:
+ """Return colors of Okabe-Ito colorblind-friendly palette.
+
+ Returns:
+ HEX color codes.
+ """
+ colors = [
+ "#E69F00",
+ "#56B4E9",
+ "#009E73",
+ "#F0E442",
+ "#0072B2",
+ "#D55E00",
+ "#CC79A7",
+ "#000000",
+ ]
+ return colors
+
+
+def color_dict() -> dict[str, str]:
+ """Return same as `colors_list()` but dict."""
+ color_names = [
+ "orange",
+ "sky-blue",
+ "bluish-green",
+ "yellow",
+ "blue",
+ "vermilion",
+ "reddish-purple",
+ "black",
+ ]
+ colors = dict(zip(color_names, color_list()))
+ return colors
+
+
+def get_linestyles():
+ return [
+ "solid",
+ "dotted",
+ "dashed",
+ "dashdot",
+ (0, (1, 10)), # "loosely dotted"
+ ]
+
+
+def fig_init(
+ width_num_cols: Union[int, tuple[int, int]],
+ height_frac: float,
+ fig_shape: tuple[int, int] = (1, 1),
+ sharex=False,
+ sharey=False,
+ scaled_position: tuple[float, float] = (-0.06, 1.04),
+    custom_fig: Optional[matplotlib.figure.Figure] = None,
+) -> tuple[matplotlib.figure.Figure, Union[matplotlib.axes.Axes, np.ndarray]]:
+ width = Config.COL_WIDTHS[width_num_cols]
+ height = height_frac * width
+ if custom_fig is not None:
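+        # Reuse an externally constructed figure (e.g. one built with gridspec)
+        # and restyle the axes it already contains.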
+ fig = custom_fig
+ fig.set_size_inches(width, height)
+ axes = fig.axes
+ axes = np.array(axes)
+ fig_shape = axes.shape
+ else:
+ fig, axes = plt.subplots(
+ *fig_shape,
+ sharex=sharex,
+ sharey=sharey,
+ figsize=(width, height),
+ )
+ fig.tight_layout()
+
+ if fig_shape == (1, 1):
+ temp_axes = np.array([axes])
+ else:
+ temp_axes = axes
+
+ for idx, axis in enumerate(temp_axes.flatten()):
+ axis.xaxis.label.set_size(Config.AXIS_LABEL_FONT_SIZE)
+ axis.yaxis.label.set_size(Config.AXIS_LABEL_FONT_SIZE)
+ axis.tick_params(axis="both", which="both", labelsize=Config.TICKS_FONT_SIZE)
+ if fig_shape != (1, 1):
+ add_subfigure_label(axis, idx, scaled_position, Config.SUBPLOT_LABEL_SIZE)
+
+ return fig, axes
+
+
+def add_subfigure_label(
+ axis,
+ letter_idx: int,
+ normalised_position: tuple[float, float],
+ fontsize: float = Config.SUBPLOT_LABEL_SIZE,
+ is_lowercase: bool = True,
+):
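+    # 65 == ord("A"); adding 32 shifts into the lowercase ASCII range.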
+ ascii_idx = 65 + letter_idx
+ if is_lowercase:
+ ascii_idx += 32
+ add_text(axis, chr(ascii_idx), normalised_position, fontsize=fontsize, fontweight="bold")
+
+
+def add_text(
+ axis,
+ text: str,
+ normalised_position: tuple[float, float],
+ fontsize: float = Config.TEXT_LABEL_SIZE,
+ fontweight: str = "normal",
+ color: str = None,
+):
+ axis.text(
+ *normalised_position,
+ text,
+ horizontalalignment="center",
+ verticalalignment="center",
+ transform=axis.transAxes,
+ fontweight=fontweight,
+ fontsize=fontsize,
+ color=color,
+ )
+
+
+def plot_training_curves(
+ axis,
+ iterator,
+ metric="error",
+ inference_idx=0,
+ linestyle="solid",
+ is_many=False,
+):
+ colors = color_dict()
+
+ # Training curve.
+ x_training, y_training = iterator.training_curves(metric)
+ plot_curve(
+ axis,
+ x_training,
+ y_training,
+ colors["orange"],
+ metric=metric,
+ linestyle=linestyle,
+ is_many=is_many,
+ )
+
+ # Validation curve.
+ x_validation, y_validation = iterator.validation_curves(metric)
+ plot_curve(
+ axis,
+ x_validation,
+ y_validation,
+ colors["sky-blue"],
+ metric=metric,
+ linestyle=linestyle,
+ is_many=is_many,
+ )
+
+ # Testing (during training) curve.
+ x_training_testing, y_training_testing = iterator.training_testing_curves(
+ metric, iterator.inferences[inference_idx]
+ )
+ plot_curve(
+ axis,
+ x_training_testing,
+ y_training_testing,
+ colors["reddish-purple"],
+ metric=metric,
+ linestyle=linestyle,
+ is_many=is_many,
+ )
+
+ axis.set_yscale("log")
+ axis.set_xlim([0, len(x_training)])
+
+
+def plot_curve(axis, x, y, color, metric="error", linestyle="solid", is_many=False):
+ if metric in ["accuracy", "error"]:
+ y = 100 * y
+ lw = Config.LINEWIDTH
+ if is_many:
+ lw /= 2
+ if len(y.shape) > 1:
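+        # Multiple repeats: shade the min-max envelope and draw the median curve.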
+ alpha = 0.25
+ if is_many:
+ alpha /= 2
+ y_min = np.min(y, axis=1)
+ y_max = np.max(y, axis=1)
+ y_median = np.median(y, axis=1)
+ axis.fill_between(x, y_min, y_max, color=color, alpha=alpha, linewidth=0)
+ axis.plot(x, y_median, color=color, linewidth=lw / 2, linestyle=linestyle)
+ else:
+ if is_many:
+ x = np.concatenate((x[::20], [x[-1]]))
+ y = np.concatenate((y[::20], [y[-1]]))
+ lw /= 2 # Make all curves the same linewidth.
+ axis.plot(x, y, color=color, linewidth=lw, linestyle=linestyle)
+
+
+def _numpify(x):
+ try: # In case `tf.Tensor`
+ x = x.numpy()
+ except AttributeError:
+ pass
+
+ return x
+
+
+def plot_scatter(axis, x, y, color, alpha=1.0, random_proportion=None):
+ x = _numpify(x)
+ y = _numpify(y)
+ x = x.flatten()
+ y = y.flatten()
+ if random_proportion:
+ np.random.seed(0)
+ num_points = x.size
+ num_reduced_points = int(np.around(random_proportion * num_points))
+        # Sample without replacement so that distinct points are shown.
+        random_idxs = np.random.choice(num_points, num_reduced_points, replace=False)
+ x = x[random_idxs]
+ y = y[random_idxs]
+ axis.scatter(
+ x,
+ y,
+ color=color,
+ marker="x",
+ s=Config.MARKER_SIZE,
+ linewidth=Config.MARKER_SIZE,
+ alpha=alpha,
+ )
+
+
+def plot_boxplot(
+ axis,
+ y,
+ color,
+ x=None,
+ metric="error",
+ is_x_log=False,
+ linewidth_scaling=1.0,
+ linear_width: float = 0.25,
+):
+ y = y.flatten()
+ if metric in ["accuracy", "error"]:
+ y = 100 * y
+
+ positions = None
+ if x is not None:
+ try:
+ x = x.flatten()
+ except AttributeError:
+ pass
+ positions = [np.mean(x)]
+ if is_x_log and positions is not None:
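+        # On a log axis, convert a fixed width in log10 units into linear data
+        # units centred at the box position so all boxes appear equally wide.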
+ widths = [
+ 10 ** (np.log10(positions[0]) + linear_width / 2.0)
+ - 10 ** (np.log10(positions[0]) - linear_width / 2.0)
+ ]
+ else:
+ widths = [linear_width]
+ boxplot = axis.boxplot(
+ y,
+ positions=positions,
+ widths=widths,
+ sym=color,
+ )
+ plt.setp(boxplot["fliers"], marker="x", markersize=4 * Config.MARKER_SIZE, markeredgewidth=0.5)
+ for element in ["boxes", "whiskers", "fliers", "means", "medians", "caps"]:
+ plt.setp(
+ boxplot[element], color=color, linewidth=linewidth_scaling * Config.BOXPLOT_LINEWIDTH
+ )
+
+ if is_x_log:
+ axis.set_xscale("log")
+ else:
+ axis.set_xscale("linear")
+ axis.set_yscale("log")
+
+ return boxplot
+
+
+def add_boxplot_legend(axis, boxplots, labels, linewidth=1.0, loc="upper right"):
+ leg = axis.legend(
+ [boxplot["boxes"][0] for boxplot in boxplots],
+ labels,
+ fontsize=Config.LEGEND_FONT_SIZE,
+ frameon=False,
+ loc=loc,
+ )
+ for line in leg.get_lines():
+        line.set_linewidth(linewidth)
+
+
+def add_legend(
+ fig,
+ labels=None,
+ ncol=1,
+ loc="center",
+ bbox_to_anchor=(0.5, 1.0),
+ linewidth=1.0,
+ frameon=False,
+ handles=None,
+):
+ if handles is None:
+ leg = fig.legend(labels, ncol=ncol, loc=loc, bbox_to_anchor=bbox_to_anchor, frameon=frameon)
+ else:
+ leg = fig.legend(
+ handles=handles, ncol=ncol, loc=loc, bbox_to_anchor=bbox_to_anchor, frameon=frameon
+ )
+ for line in leg.get_lines():
+ line.set_linewidth(linewidth)
+
+
+def save_fig(fig, name: str, is_supporting: bool = False, metric: str = "error"):
+ dir_name = "plots"
+ os.makedirs(dir_name, exist_ok=True)
+ if metric != "error":
+ name += f"-{metric}"
+ if is_supporting:
+ name = f"supporting-information--{name}"
+ path = os.path.join(dir_name, f"{name}.pdf")
+ fig.savefig(path, bbox_inches="tight", transparent=True)
+
+
+def axis_label(var_name: str, prepend: str = None, unit_prefix: str = "") -> str:
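+    """Return a capitalised axis label for a known variable name.
+
+    Example (illustrative):
+        >>> axis_label("voltage", unit_prefix="m")
+        'Voltage (mV)'
+    """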
+ if var_name == "accuracy":
+ label = "accuracy (%)"
+ elif var_name == "error":
+ label = "error (%)"
+ elif var_name == "loss":
+ label = "loss"
+ elif var_name == "epoch":
+ label = "epoch"
+ elif var_name == "inference":
+ label = "inference"
+ elif var_name == "training":
+ label = "training"
+ elif var_name == "power-consumption":
+ label = f"power consumption ({unit_prefix}W)"
+ elif var_name == "d2d-uniformity":
+ label = "uniformity of D2D variability"
+ elif var_name == "checkpoint":
+ label = "checkpoint"
+ elif var_name == "conductance":
+ label = f"conductance ({unit_prefix}S)"
+ elif var_name == "voltage":
+ label = f"voltage ({unit_prefix}V)"
+ elif var_name == "current":
+ label = f"current ({unit_prefix}A)"
+ elif var_name == "nonlinearity-parameter":
+ label = "nonlinearity parameter"
+ elif var_name == "pulse-number":
+ label = "pulse number"
+ elif var_name == "g-plus":
+ label = rf"$G_{{+}}$ ({unit_prefix}S)"
+ elif var_name == "g-minus":
+ label = rf"$G_{{-}}$ ({unit_prefix}S)"
+ else:
+ raise ValueError(f'Unrecognised variable name "{var_name}".')
+
+ if prepend is not None:
+ label = f"{prepend} {label}"
+
+ first_letter = label[0].upper()
+ if len(label) > 1:
+ label = first_letter + label[1:]
+ else:
+ label = first_letter
+
+ return label
+
+
+def _get_luminance(r, g, b):
+ """Adapted from ."""
+ return 0.299 * r + 0.587 * g + 0.114 * b
+
+
+def _annotate_heatmap(
+    im: matplotlib.image.AxesImage,
+    data: Optional[np.ndarray] = None,
+ valfmt: Union[str, matplotlib.ticker.StrMethodFormatter] = "{x:.2f}",
+ textcolors: tuple[str, str] = ("black", "white"),
+ threshold: float = 0.5,
+ norm_rows: bool = False,
+ **textkw,
+):
+ """Annotate a heatmap. Adapted from
+ .
+
+ Args:
+ im: The image to be labelled.
+ data: Data used to annotate. If `None`, the image's data is used.
+ valfmt: The format of the annotations inside the heatmap. This should
+ either use the string format method, e.g. `{x:.2f}`, or be a
+ `matplotlib.ticker.Formatter`.
+        textcolors: A pair of colors. The first is used for cells whose
+            background luminance is at or below `threshold`, the second for
+            those above.
+        threshold: Luminance value (between 0 and 1) separating the two text
+            colors. Defaults to 0.5.
+        norm_rows: If `True`, luminance is computed with each row normalised
+            independently, matching `add_heatmap`.
+        **textkw: All other arguments are forwarded to each call to `text`
+            used to create the text labels.
+ """
+ if not isinstance(data, (list, np.ndarray)):
+ data = im.get_array()
+
+ # Set default alignment to center, but allow it to be overwritten by textkw.
+ kw = dict(horizontalalignment="center", verticalalignment="center")
+ kw.update(textkw)
+
+ # Get the formatter in case a string is supplied
+ if isinstance(valfmt, str):
+ valfmt = matplotlib.ticker.StrMethodFormatter(valfmt)
+
+ # Loop over the data and create a `Text` for each "pixel".
+ # Change the text's color depending on the data.
+ texts = []
+ for i in range(data.shape[0]):
+ if norm_rows:
+ colors = im.cmap(matplotlib.colors.LogNorm()(data[i, :]))
+ luminance = _get_luminance(colors[:, 0], colors[:, 1], colors[:, 2])
+ else:
+ colors = im.cmap(matplotlib.colors.LogNorm()(data))
+ luminance = _get_luminance(colors[:, :, 0], colors[:, :, 1], colors[:, :, 2])
+ for j in range(data.shape[1]):
+ if norm_rows:
+ cell_luminance = luminance[j]
+ else:
+ cell_luminance = luminance[i, j]
+ kw.update(color=textcolors[int(cell_luminance > threshold)])
+ text = im.axes.text(j, i, valfmt(data[i, j], None), **kw)
+ texts.append(text)
+
+ return texts
+
+
+def add_heatmap(fig, axis, data, x_ticks=None, y_ticks=None, metric="error", norm_rows=False):
+ if metric in ["accuracy", "error"]:
+ data = 100 * data
+
+ data = data.to_numpy()
+ if norm_rows:
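+        # Normalise each row independently by drawing it as a separate masked
+        # image, each with its own log-scaled colour normalisation.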
+ num_rows = data.shape[0]
+ row_indices = np.arange(num_rows)
+ for i in range(num_rows):
+ row_data = ma.array(copy.deepcopy(data))
+ row_data[row_indices != i, :] = ma.masked
+ image = axis.imshow(row_data, norm=matplotlib.colors.LogNorm(), cmap="cividis")
+ else:
+ image = axis.imshow(data, norm=matplotlib.colors.LogNorm(), cmap="cividis")
+
+ if x_ticks is not None:
+ axis.set_xticks(np.arange(len(x_ticks)))
+ axis.set_xticklabels(x_ticks)
+ axis.tick_params(top=True, bottom=False, labeltop=True, labelbottom=False)
+ plt.setp(axis.get_xticklabels(), rotation=-45, ha="right", rotation_mode="anchor")
+ axis.xaxis.set_label_position("top")
+ if y_ticks is not None:
+ axis.set_yticks(np.arange(len(y_ticks)))
+ axis.set_yticklabels(y_ticks)
+
+ if not norm_rows:
+ cbar = fig.colorbar(image, ax=axis)
+ cbar.ax.set_ylabel(
+ axis_label(metric, prepend="median"),
+ rotation=-90,
+ fontsize=Config.AXIS_LABEL_FONT_SIZE,
+ va="bottom",
+ )
+ cbar.ax.tick_params(axis="both", which="both", labelsize=Config.TICKS_FONT_SIZE)
+
+ _annotate_heatmap(
+ image,
+ data=data,
+ valfmt="{x:.1f}",
+ textcolors=("white", "black"),
+ size=Config.TICKS_FONT_SIZE,
+ norm_rows=norm_rows,
+ )
+
+
+def add_histogram(axis, values: np.ndarray, color: str, bins: int = 100, alpha: float = 1.0):
+ try: # In case `tf.Tensor`
+ values = values.numpy()
+ except AttributeError:
+ pass
+ values = values.flatten()
+ axis.hist(values, bins=bins, color=color, alpha=alpha)
+
+
+def add_arrow(
+ line,
+ start_idx: int,
+ direction: str = "right",
+ size: float = 15,
+ color: str = None,
+ linewidth: float = None,
+):
+ """Adds arrow to a curve.
+
+ Adapted from .
+ """
+ if color is None:
+ color = line.get_color()
+ if linewidth is None:
+ linewidth = line.get_linewidth()
+
+ xdata = line.get_xdata()
+ ydata = line.get_ydata()
+
+ if direction == "right":
+ end_idx = start_idx + 1
+ else:
+ end_idx = start_idx - 1
+
+ line.axes.annotate(
+ "",
+ xytext=(xdata[start_idx], ydata[start_idx]),
+ xy=(xdata[end_idx], ydata[end_idx]),
+ arrowprops=dict(linewidth=linewidth, arrowstyle="->", color=color),
+ size=size,
+ )
diff --git a/awarememristor/simulations/__init__.py b/awarememristor/simulations/__init__.py
new file mode 100644
index 0000000..8031db0
--- /dev/null
+++ b/awarememristor/simulations/__init__.py
@@ -0,0 +1,7 @@
+from awarememristor.simulations import (data, ideal, iv_nonlinearity,
+ iv_nonlinearity_and_stuck_on,
+ iv_nonlinearity_cnn,
+ memristive_validation,
+ nonideality_agnosticism,
+ stuck_distribution, stuck_off,
+ weight_implementation)
diff --git a/awarememristor/simulations/data.py b/awarememristor/simulations/data.py
new file mode 100644
index 0000000..4ab8007
--- /dev/null
+++ b/awarememristor/simulations/data.py
@@ -0,0 +1,182 @@
+import os
+from typing import Optional
+
+import h5py
+import numpy as np
+import openpyxl
+import pandas as pd
+import requests
+from scipy.io import loadmat
+
+
+def load_SiO_x_multistate() -> np.ndarray:
+ """Load SiO_x data from multiple conductance states.
+
+ Returns:
+ Array of shape `(2, num_states, num_points)`. The first dimension
+ combines current and voltage values.
+ """
+ path = os.path.join(_create_and_get_data_dir(), "SiO_x-multistate-data.mat")
+ _validate_data_path(path, url="https://zenodo.org/record/5762184/files/excelDataCombined.mat")
+ data = loadmat(path)["data"]
+ data = np.flip(data, axis=2)
+ data = np.transpose(data, (1, 2, 0))
+ data = data[:2, :, :]
+ return data
+
+
+def _workbook_sheet_to_ndarray(workbook, sheet_name: str):
+ sheet = workbook[sheet_name]
+ return pd.DataFrame(sheet.values).to_numpy()[1:, :]
+
+
+def load_SiO_x_switching() -> np.ndarray:
+ """Load SiO_x switching data.
+
+ Returns:
+ Array of shape `(num_points, 2, 2)`. The second dimension combines
+        current and voltage values, while the third combines SET and RESET
+ modes.
+ """
+ path = os.path.join(_create_and_get_data_dir(), "SiO_x-switching-data.xlsx")
+ _validate_data_path(
+ path,
+ url="https://zenodo.org/record/5762184/files/Ordinary%20I-V%20data%20%28full%20cycle%29.xlsx",
+ )
+    workbook = openpyxl.load_workbook(filename=path)
+    set_data = _workbook_sheet_to_ndarray(workbook, "SET")[:, :2]
+    reset_data = _workbook_sheet_to_ndarray(workbook, "RESET")[:, :2]
+ data = np.stack([set_data, reset_data], axis=-1)
+ return data
+
+
+def all_SiO_x_curves(data, max_voltage=0.5, voltage_step=0.005):
+ num_points = int(max_voltage / voltage_step) + 1
+
+ data = data[:, :, :num_points]
+ voltages = data[1, :, :]
+ currents = data[0, :, :]
+
+ return voltages, currents
+
+
+def low_high_n_SiO_x_curves(data):
+ # Arbitrary, but 11 results in a similar G_on/G_off ratio.
+ NUM_LOW_N_CURVES = 11
+
+ voltages, currents = all_SiO_x_curves(data)
+
+ num_points = voltages.shape[1]
+ half_voltage_idx = int(num_points / 2)
+ resistances = voltages[:, half_voltage_idx] / currents[:, half_voltage_idx]
+ indices = np.argsort(resistances)
+ resistances = resistances[indices]
+ voltages = voltages[indices, :]
+ currents = currents[indices, :]
+
+ low_n_ratio = resistances[NUM_LOW_N_CURVES - 1] / resistances[0]
+
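+    # Starting from the highest-resistance curve, grow the high-nonlinearity
+    # set downwards until its resistance ratio exceeds that of the low-n set.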
+ high_n_R_off = resistances[-1]
+ idx = len(indices) - 2
+ while True:
+ # Stop whenever we exceed G_on/G_off ratio of low-nonlinearity region.
+ if high_n_R_off / resistances[idx] > low_n_ratio:
+ break
+ idx -= 1
+
+ low_n_voltages = voltages[:NUM_LOW_N_CURVES, :]
+ low_n_currents = currents[:NUM_LOW_N_CURVES, :]
+ high_n_voltages = voltages[idx:, :]
+ high_n_currents = currents[idx:, :]
+
+ return (low_n_voltages, low_n_currents), (high_n_voltages, high_n_currents)
+
+
+def nonlinearity_parameter(current_curve):
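+    # Ratio of the current at the maximum voltage to the current at half the
+    # maximum voltage (assuming uniformly spaced voltage points).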
+ num_points = len(current_curve)
+ half_voltage_idx = int(num_points / 2)
+ return current_curve[-1] / current_curve[half_voltage_idx]
+
+
+def G_at_half_voltage(voltage_curve, current_curve):
+ num_points = len(current_curve)
+ half_voltage_idx = int(num_points / 2)
+ return current_curve[half_voltage_idx] / voltage_curve[half_voltage_idx]
+
+
+def low_high_n_SiO_x_vals(data, is_high_nonlinearity):
+ curves = low_high_n_SiO_x_curves(data)
+ if is_high_nonlinearity:
+ idx = 1
+ else:
+ idx = 0
+ voltage_curves, current_curves = curves[idx]
+
+ n = [nonlinearity_parameter(curve) for curve in current_curves]
+ n_avg, n_std = np.mean(n), np.std(n, ddof=1)
+
+ G_on = G_at_half_voltage(voltage_curves[0, :], current_curves[0, :])
+ G_off = G_at_half_voltage(voltage_curves[-1, :], current_curves[-1, :])
+ return G_off, G_on, n_avg, n_std
+
+
+def load_Ta_HfO2():
+ """Load Ta/HfO2 data.
+
+ Returns:
+ Array of shape `(num_cycles, num_pulses, num_bit_lines, num_word_lines)`.
+ The first half of `num_pulses` denotes potentiation, while the second
+ half denotes depression.
+ """
+ path = os.path.join(_create_and_get_data_dir(), "Ta_HfO2-data.mat")
+ _validate_data_path(path)
+ f = h5py.File(path, "r")
+ data = f.get("G_reads")
+ data = np.array(data)
+ return data
+
+
+def extract_G_off_and_G_on(data: np.ndarray) -> tuple[float, float]:
+ shape = data.shape
+ data = np.reshape(data, (shape[0] * shape[1], shape[2] * shape[3]))
+ G_offs = np.min(data, axis=0)
+ G_off = np.median(G_offs)
+ G_ons = np.max(data, axis=0)
+ G_on = np.median(G_ons)
+
+ return G_off, G_on
+
+
+def extract_stuck(data: np.ndarray, G_off: float, G_on: float) -> tuple[list[float], float]:
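+    # A device is considered stuck if its conductance range across all pulses
+    # stays below half of the median G_on - G_off range; see
+    # `stuck_device_threshold()`.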
+ median_range = G_on - G_off
+ shape = data.shape
+ data = np.reshape(data, (shape[0] * shape[1], shape[2] * shape[3]))
+ mins = np.min(data, axis=0)
+ maxs = np.max(data, axis=0)
+ ranges = maxs - mins
+ means = np.mean(data, axis=0)
+ stuck_values = means[np.where(ranges < stuck_device_threshold(median_range))]
+ probability_stuck = stuck_values.shape[0] / means.shape[0]
+ return stuck_values.tolist(), probability_stuck
+
+
+def stuck_device_threshold(median_range):
+ return median_range / 2
+
+
+def _validate_data_path(path: str, url: Optional[str] = None) -> None:
+ if os.path.isfile(path):
+ return
+
+ if url is None:
+ raise ValueError(f'Data file "{path}" does not exist and the URL has not been provided.')
+
+ with open(path, "wb") as file:
+ response = requests.get(url)
+ file.write(response.content)
+
+
+def _create_and_get_data_dir() -> str:
+ dir_name = ".data"
+ os.makedirs(dir_name, exist_ok=True)
+ return dir_name
diff --git a/awarememristor/simulations/devices.py b/awarememristor/simulations/devices.py
new file mode 100644
index 0000000..a87661b
--- /dev/null
+++ b/awarememristor/simulations/devices.py
@@ -0,0 +1,102 @@
+from typing import Any
+
+from awarememristor.crossbar.nonidealities import (D2DLognormal,
+ IVNonlinearity, Nonideality,
+ StuckAtGOff, StuckAtGOn,
+ StuckDistribution)
+from awarememristor.simulations import data
+
+
+def ideal() -> dict[str, Any]:
+ return {"G_off": None, "G_on": None, "nonidealities": []}
+
+
+def SiO_x_V_ref() -> dict[str, float]:
+ exp_data = data.load_SiO_x_multistate()
+ (voltages, _), (_, _) = data.low_high_n_SiO_x_curves(exp_data)
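+    # Use half of the maximum applied voltage (taken from the first
+    # low-nonlinearity curve) as the reference voltage.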
+ V_ref = voltages[0][-1] / 2
+
+ return {"V_ref": float(V_ref)}
+
+
+def _SiO_x_G(is_high_nonlinearity: bool) -> dict[str, float]:
+ exp_data = data.load_SiO_x_multistate()
+ G_off, G_on, _, _ = data.low_high_n_SiO_x_vals(exp_data, is_high_nonlinearity)
+ return {
+ "G_off": float(G_off),
+ "G_on": float(G_on),
+ }
+
+
+def _SiO_x_nonidealities(is_high_nonlinearity: bool) -> dict[str, list[Nonideality]]:
+ exp_data = data.load_SiO_x_multistate()
+ _, _, n_avg, n_std = data.low_high_n_SiO_x_vals(exp_data, is_high_nonlinearity)
+ V_ref = SiO_x_V_ref()["V_ref"]
+ return {
+ "nonidealities": [IVNonlinearity(V_ref, float(n_avg), float(n_std))],
+ }
+
+
+def SiO_x(is_high_nonlinearity: bool) -> dict[str, Any]:
+ return {
+ **_SiO_x_G(is_high_nonlinearity),
+ **_SiO_x_nonidealities(is_high_nonlinearity),
+ }
+
+
+def stuck_off() -> dict[str, Any]:
+ G = _SiO_x_G(True)
+ return {
+ **G,
+ "nonidealities": [
+ StuckAtGOff(G["G_off"], 0.05),
+ ],
+ }
+
+
+def SiO_x_high_nonlinearity_and_stuck_on() -> dict[str, Any]:
+ is_high_nonlinearity = True
+ G = _SiO_x_G(is_high_nonlinearity)
+ nonidealities = _SiO_x_nonidealities(is_high_nonlinearity)["nonidealities"] + [
+ StuckAtGOn(G["G_on"], 0.05)
+ ]
+ return {
+ **G,
+ "nonidealities": nonidealities,
+ }
+
+
+def more_uniform_d2d() -> dict[str, Any]:
+ G = _SiO_x_G(True)
+ return {
+ **G,
+ "nonidealities": [D2DLognormal(G["G_off"], G["G_on"], 0.25, 0.25)],
+ }
+
+
+def less_uniform_d2d() -> dict[str, Any]:
+ G = _SiO_x_G(True)
+ return {
+ **G,
+ "nonidealities": [D2DLognormal(G["G_off"], G["G_on"], 0.05, 0.5)],
+ }
+
+
+def high_magnitude_more_uniform_d2d() -> dict[str, Any]:
+ G = _SiO_x_G(True)
+ return {
+ **G,
+ "nonidealities": [D2DLognormal(G["G_off"], G["G_on"], 0.5, 0.5)],
+ }
+
+
+def Ta_HfO2() -> dict[str, Any]:
+ exp_data = data.load_Ta_HfO2()
+ G_off, G_on = data.extract_G_off_and_G_on(exp_data)
+ G_off, G_on = float(G_off), float(G_on)
+ vals, p = data.extract_stuck(exp_data, G_off, G_on)
+ return {
+ "G_off": G_off,
+ "G_on": G_on,
+ "nonidealities": [StuckDistribution(vals, p)],
+ }
diff --git a/awarememristor/simulations/ideal.py b/awarememristor/simulations/ideal.py
new file mode 100644
index 0000000..9db8fe7
--- /dev/null
+++ b/awarememristor/simulations/ideal.py
@@ -0,0 +1,49 @@
+from awarememristor.simulations import (iv_nonlinearity,
+ iv_nonlinearity_and_stuck_on,
+ iv_nonlinearity_cnn,
+ memristive_validation,
+ stuck_distribution, stuck_off,
+ weight_implementation)
+from awarememristor.training.iterator import Iterator
+
+
+def get_mnist_iterator():
+ iterators = [
+ iv_nonlinearity.get_ideal_iterator(),
+ stuck_off.get_ideal_iterator(),
+ iv_nonlinearity_and_stuck_on.get_ideal_iterator(),
+ stuck_distribution.get_ideal_iterator(),
+ weight_implementation.get_ideal_iterator(),
+ memristive_validation.get_ideal_iterator(),
+ ]
+
+ return Iterator(
+ "mnist",
+ iterators[0].training,
+ [inference for iterator in iterators for inference in iterator.inferences],
+ )
+
+
+def get_cifar10_iterator():
+ iterators = [
+ iv_nonlinearity_cnn.get_ideal_iterator(),
+ ]
+
+ return Iterator(
+ "cifar10",
+ iterators[0].training,
+ [inference for iterator in iterators for inference in iterator.inferences],
+ )
+
+
+def get_iterators():
+ return [
+ get_mnist_iterator(),
+ get_cifar10_iterator(),
+ ]
+
+
+def main():
+ for iterator in get_iterators():
+ iterator.train(use_test_callback=True)
+ iterator.infer()
diff --git a/awarememristor/simulations/iv_nonlinearity.py b/awarememristor/simulations/iv_nonlinearity.py
new file mode 100644
index 0000000..0d1f2ad
--- /dev/null
+++ b/awarememristor/simulations/iv_nonlinearity.py
@@ -0,0 +1,39 @@
+from awarememristor.simulations import devices, utils
+from awarememristor.training.iterator import Inference, Iterator, Training
+
+DATASET = "mnist"
+
+
+def custom_iterator(training_setup, inference_setups, is_regularized):
+ inferences = [Inference(**utils.get_inference_params(), **setup) for setup in inference_setups]
+ training = Training(
+ **utils.get_training_params(), is_regularized=is_regularized, **training_setup
+ )
+
+ return Iterator(DATASET, training, inferences)
+
+
+def get_ideal_iterator():
+ return custom_iterator(devices.ideal(), [devices.SiO_x(False), devices.SiO_x(True)], False)
+
+
+def get_nonideal_iterators():
+ return [
+ custom_iterator(devices.SiO_x(False), [devices.SiO_x(False)], False),
+ custom_iterator(devices.SiO_x(False), [devices.SiO_x(False)], True),
+ custom_iterator(devices.SiO_x(True), [devices.SiO_x(True)], False),
+ custom_iterator(devices.SiO_x(True), [devices.SiO_x(True)], True),
+ ]
+
+
+def get_iterators():
+ return [
+ get_ideal_iterator(),
+ *get_nonideal_iterators(),
+ ]
+
+
+def main():
+ for iterator in get_nonideal_iterators():
+ iterator.train(use_test_callback=True)
+ iterator.infer()
diff --git a/awarememristor/simulations/iv_nonlinearity_and_stuck_on.py b/awarememristor/simulations/iv_nonlinearity_and_stuck_on.py
new file mode 100644
index 0000000..f79d838
--- /dev/null
+++ b/awarememristor/simulations/iv_nonlinearity_and_stuck_on.py
@@ -0,0 +1,39 @@
+from awarememristor.simulations import devices, utils
+from awarememristor.training.iterator import Inference, Iterator, Training
+
+DATASET = "mnist"
+
+
+def custom_iterator(training_setup, inference_setups):
+ inferences = [Inference(**utils.get_inference_params(), **setup) for setup in inference_setups]
+ training = Training(**utils.get_training_params(), is_regularized=False, **training_setup)
+
+ return Iterator(DATASET, training, inferences)
+
+
+def get_ideal_iterator():
+ return custom_iterator(devices.ideal(), [devices.SiO_x_high_nonlinearity_and_stuck_on()])
+
+
+def get_nonideal_iterators():
+ iterators = [
+ custom_iterator(
+ devices.SiO_x_high_nonlinearity_and_stuck_on(),
+ [devices.SiO_x_high_nonlinearity_and_stuck_on()],
+ ),
+ ]
+
+ return iterators
+
+
+def get_iterators():
+ return [
+ get_ideal_iterator(),
+ *get_nonideal_iterators(),
+ ]
+
+
+def main():
+ for iterator in get_nonideal_iterators():
+ iterator.train(use_test_callback=True)
+ iterator.infer()
diff --git a/awarememristor/simulations/iv_nonlinearity_cnn.py b/awarememristor/simulations/iv_nonlinearity_cnn.py
new file mode 100644
index 0000000..af49995
--- /dev/null
+++ b/awarememristor/simulations/iv_nonlinearity_cnn.py
@@ -0,0 +1,34 @@
+from awarememristor.simulations import devices, utils
+from awarememristor.training.iterator import Inference, Iterator, Training
+
+DATASET = "cifar10"
+
+
+def custom_iterator(training_setup, inference_setups):
+ inferences = [Inference(**utils.get_inference_params(), **setup) for setup in inference_setups]
+ training = Training(**utils.get_training_params(), is_regularized=False, **training_setup)
+
+ return Iterator(DATASET, training, inferences)
+
+
+def get_ideal_iterator():
+ return custom_iterator(devices.ideal(), [devices.SiO_x(True)])
+
+
+def get_nonideal_iterators():
+ return [
+ custom_iterator(devices.SiO_x(True), [devices.SiO_x(True)]),
+ ]
+
+
+def get_iterators():
+ return [
+ get_ideal_iterator(),
+ *get_nonideal_iterators(),
+ ]
+
+
+def main():
+ for iterator in get_nonideal_iterators():
+ iterator.train(use_test_callback=True)
+ iterator.infer()
diff --git a/awarememristor/simulations/memristive_validation.py b/awarememristor/simulations/memristive_validation.py
new file mode 100644
index 0000000..4133c0c
--- /dev/null
+++ b/awarememristor/simulations/memristive_validation.py
@@ -0,0 +1,55 @@
+from awarememristor.simulations import devices, utils
+from awarememristor.training.iterator import Inference, Iterator, Training
+
+DATASET = "mnist"
+
+
+def custom_iterator(
+ training_setup, inference_setups, use_combined_validation, num_training_repeats: int = None
+):
+ training_params = utils.get_training_params()
+ if num_training_repeats is not None:
+ training_params["num_repeats"] = num_training_repeats
+ inferences = [Inference(**utils.get_inference_params(), **setup) for setup in inference_setups]
+ training = Training(
+ **training_params,
+ is_regularized=False,
+ use_combined_validation=use_combined_validation,
+ **training_setup
+ )
+
+ return Iterator(DATASET, training, inferences)
+
+
+def get_ideal_iterator():
+ return custom_iterator(devices.ideal(), [devices.high_magnitude_more_uniform_d2d()], False)
+
+
+def get_nonideal_iterators():
+ return [
+ custom_iterator(
+ devices.high_magnitude_more_uniform_d2d(),
+ [devices.high_magnitude_more_uniform_d2d()],
+ True,
+ # Validation is utilized during training, so to evaluate the
+ # effectiveness of different methods, we need to increase the
+ # sample size of trained networks.
+ num_training_repeats=100,
+ ),
+ ]
+
+
+def get_iterators():
+ return [
+ get_ideal_iterator(),
+ *get_nonideal_iterators(),
+ ]
+
+
+def main():
+ for iterator in get_nonideal_iterators():
+ iterator.train(use_test_callback=True)
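+        # Run inference twice: once using memristive validation checkpoints
+        # and once using standard validation checkpoints.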
+ iterator.training.is_standard_validation_mode = False
+ iterator.infer()
+ iterator.training.is_standard_validation_mode = True
+ iterator.infer()
diff --git a/awarememristor/simulations/nonideality_agnosticism.py b/awarememristor/simulations/nonideality_agnosticism.py
new file mode 100644
index 0000000..24dfebd
--- /dev/null
+++ b/awarememristor/simulations/nonideality_agnosticism.py
@@ -0,0 +1,59 @@
+from awarememristor.simulations import (devices, ideal, iv_nonlinearity,
+ iv_nonlinearity_and_stuck_on,
+ memristive_validation,
+ stuck_distribution, stuck_off, utils,
+ weight_implementation)
+from awarememristor.training.iterator import Inference, Iterator, Training
+
+DATASET = "mnist"
+
+
+def custom_iterator(training_setup, inference_setups, is_regularized):
+ inferences = [Inference(**utils.get_inference_params(), **setup) for setup in inference_setups]
+ training = Training(
+ **utils.get_training_params(), is_regularized=is_regularized, **training_setup
+ )
+
+ return Iterator(DATASET, training, inferences)
+
+
+def get_iterators():
+ inference_setups = [
+ devices.ideal(),
+ devices.SiO_x(False),
+ devices.SiO_x(True),
+ devices.stuck_off(),
+ devices.SiO_x_high_nonlinearity_and_stuck_on(),
+ devices.more_uniform_d2d(),
+ devices.less_uniform_d2d(),
+ devices.high_magnitude_more_uniform_d2d(),
+ devices.Ta_HfO2(),
+ ]
+
+ iterators = [
+ ideal.get_mnist_iterator(),
+ *iv_nonlinearity.get_nonideal_iterators(),
+ *iv_nonlinearity_and_stuck_on.get_nonideal_iterators(),
+ *stuck_off.get_nonideal_iterators(),
+ *weight_implementation.get_nonideal_iterators()[-4:],
+ *stuck_distribution.get_nonideal_iterators(),
+ memristive_validation.get_nonideal_iterators()[0],
+ ]
+ inferences = [
+ Inference(**utils.get_inference_params(), **inference_setup)
+ for inference_setup in inference_setups
+ ]
+
+ for idx, iterator in enumerate(iterators):
+ # Use the same number of repeats for all training setups.
+ iterator.training.num_repeats = utils.get_training_params()["num_repeats"]
+ for inference in inferences:
+ if inference not in iterator.inferences:
+ iterators[idx].inferences.append(inference)
+
+ return iterators
+
+
+def main():
+ for iterator in get_iterators():
+ iterator.infer()
diff --git a/awarememristor/simulations/stuck_distribution.py b/awarememristor/simulations/stuck_distribution.py
new file mode 100644
index 0000000..0873165
--- /dev/null
+++ b/awarememristor/simulations/stuck_distribution.py
@@ -0,0 +1,34 @@
+from awarememristor.simulations import devices, utils
+from awarememristor.training.iterator import Inference, Iterator, Training
+
+DATASET = "mnist"
+
+
+def custom_iterator(training_setup, inference_setups):
+ inferences = [Inference(**utils.get_inference_params(), **setup) for setup in inference_setups]
+ training = Training(**utils.get_training_params(), is_regularized=False, **training_setup)
+
+ return Iterator(DATASET, training, inferences)
+
+
+def get_ideal_iterator():
+ return custom_iterator(devices.ideal(), [devices.Ta_HfO2()])
+
+
+def get_nonideal_iterators():
+ return [
+ custom_iterator(devices.Ta_HfO2(), [devices.Ta_HfO2()]),
+ ]
+
+
+def get_iterators():
+ return [
+ get_ideal_iterator(),
+ *get_nonideal_iterators(),
+ ]
+
+
+def main():
+ for iterator in get_nonideal_iterators():
+ iterator.train(use_test_callback=True)
+ iterator.infer()
diff --git a/awarememristor/simulations/stuck_off.py b/awarememristor/simulations/stuck_off.py
new file mode 100644
index 0000000..8968b6f
--- /dev/null
+++ b/awarememristor/simulations/stuck_off.py
@@ -0,0 +1,34 @@
+from awarememristor.simulations import devices, utils
+from awarememristor.training.iterator import Inference, Iterator, Training
+
+DATASET = "mnist"
+
+
+def custom_iterator(training_setup, inference_setups):
+ inferences = [Inference(**utils.get_inference_params(), **setup) for setup in inference_setups]
+ training = Training(**utils.get_training_params(), is_regularized=False, **training_setup)
+
+ return Iterator(DATASET, training, inferences)
+
+
+def get_ideal_iterator():
+ return custom_iterator(devices.ideal(), [devices.stuck_off()])
+
+
+def get_nonideal_iterators():
+ return [
+ custom_iterator(devices.stuck_off(), [devices.stuck_off()]),
+ ]
+
+
+def get_iterators():
+ return [
+ get_ideal_iterator(),
+ *get_nonideal_iterators(),
+ ]
+
+
+def main():
+ for iterator in get_nonideal_iterators():
+ iterator.train(use_test_callback=True)
+ iterator.infer()
diff --git a/awarememristor/simulations/utils.py b/awarememristor/simulations/utils.py
new file mode 100644
index 0000000..a13e3ca
--- /dev/null
+++ b/awarememristor/simulations/utils.py
@@ -0,0 +1,30 @@
+def get_training_params():
+ return {
+ "num_repeats": 5,
+ "num_epochs": 1000,
+ "batch_size": 64,
+ }
+
+
+def get_inference_params():
+ return {
+ "num_repeats": 25,
+ }
+
+
+def get_energy_efficiency(
+ avg_power: float,
+    num_neurons_lst: tuple[int, ...] = (784, 25, 10),  # Immutable default.
+ read_time: float = 50e-9,
+):
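+    # Counting two operations (a multiply and an accumulate) per synapse per
+    # read, dividing by the energy of one read gives operations per joule.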
+ num_synapses = get_num_synapses(num_neurons_lst)
+ energy_efficiency = (2 * num_synapses) / (read_time * avg_power)
+ return energy_efficiency
+
+
+def get_num_synapses(num_neurons_lst: tuple[int, ...]) -> int:
+ num_synapses = 0
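+    # The "+ 1" accounts for the bias, implemented as an extra input neuron.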
+ for idx, num_neurons in enumerate(num_neurons_lst[:-1]):
+ num_synapses += (num_neurons + 1) * num_neurons_lst[idx + 1]
+
+ return num_synapses
diff --git a/awarememristor/simulations/weight_implementation.py b/awarememristor/simulations/weight_implementation.py
new file mode 100644
index 0000000..df00fca
--- /dev/null
+++ b/awarememristor/simulations/weight_implementation.py
@@ -0,0 +1,82 @@
+from awarememristor.simulations import devices, utils
+from awarememristor.training.iterator import Inference, Iterator, Training
+
+DATASET = "mnist"
+
+
+def custom_iterator(
+ training_setup,
+ inference_setups,
+ is_regularized=False,
+ force_standard_w=False,
+ mapping_rule="default",
+):
+ inferences = [
+ Inference(mapping_rule=mapping_rule, **utils.get_inference_params(), **setup)
+ for setup in inference_setups
+ ]
+ training = Training(
+ **utils.get_training_params(),
+ is_regularized=is_regularized,
+ force_standard_w=force_standard_w,
+ mapping_rule=mapping_rule,
+ **training_setup
+ )
+
+ return Iterator(DATASET, training, inferences)
+
+
+def get_ideal_iterator():
+ iterator = custom_iterator(
+ devices.ideal(), [devices.more_uniform_d2d(), devices.less_uniform_d2d()], False
+ )
+
+ return iterator
+
+
+def get_nonideal_iterators():
+ return [
+ custom_iterator(
+ devices.more_uniform_d2d(),
+ [devices.more_uniform_d2d()],
+ force_standard_w=True,
+ mapping_rule="avg",
+ ),
+ custom_iterator(
+ devices.more_uniform_d2d(),
+ [devices.more_uniform_d2d()],
+ force_standard_w=True,
+ ),
+ custom_iterator(
+ devices.less_uniform_d2d(),
+ [devices.less_uniform_d2d()],
+ force_standard_w=True,
+ mapping_rule="avg",
+ ),
+ custom_iterator(
+ devices.less_uniform_d2d(),
+ [devices.less_uniform_d2d()],
+ force_standard_w=True,
+ ),
+ custom_iterator(devices.more_uniform_d2d(), [devices.more_uniform_d2d()]),
+ custom_iterator(
+ devices.more_uniform_d2d(), [devices.more_uniform_d2d()], is_regularized=True
+ ),
+ custom_iterator(devices.less_uniform_d2d(), [devices.less_uniform_d2d()]),
+ custom_iterator(
+ devices.less_uniform_d2d(), [devices.less_uniform_d2d()], is_regularized=True
+ ),
+ ]
+
+
+def get_iterators():
+ return [
+ get_ideal_iterator(),
+ *get_nonideal_iterators(),
+ ]
+
+
+def main():
+ for iterator in get_nonideal_iterators():
+ iterator.train(use_test_callback=True)
+ iterator.infer()
diff --git a/awarememristor/training/__init__.py b/awarememristor/training/__init__.py
new file mode 100644
index 0000000..c426286
--- /dev/null
+++ b/awarememristor/training/__init__.py
@@ -0,0 +1 @@
+from awarememristor.training import iterator
diff --git a/awarememristor/training/architecture.py b/awarememristor/training/architecture.py
new file mode 100644
index 0000000..1c69491
--- /dev/null
+++ b/awarememristor/training/architecture.py
@@ -0,0 +1,229 @@
+import numpy as np
+import tensorflow as tf
+from tensorflow.keras import constraints, layers, models
+
+from awarememristor import crossbar
+from awarememristor.training import utils
+
+
+def get_model(iterator, custom_weights=None, custom_weights_path=None):
+ num_hidden_neurons = 25
+ if iterator.dataset == "mnist":
+ model = models.Sequential()
+ model.add(layers.Flatten(input_shape=(28, 28)))
+ model.add(MemristorDense(784, num_hidden_neurons, iterator))
+ model.add(layers.Activation("sigmoid"))
+ model.add(MemristorDense(num_hidden_neurons, 10, iterator))
+ model.add(layers.Activation("softmax"))
+ elif iterator.dataset == "cifar10":
+ model = models.Sequential()
+
+ # Convolutional layers
+ model.add(layers.Conv2D(32, (3, 3), activation="relu", input_shape=(32, 32, 3)))
+ model.add(layers.MaxPooling2D((2, 2)))
+ model.add(layers.Conv2D(64, (3, 3), activation="relu"))
+ model.add(layers.MaxPooling2D((2, 2)))
+ # Ensure inputs bounded between 0 and 1 for first memristive layer.
+ model.add(layers.Conv2D(64, (3, 3), activation=tf.keras.layers.ReLU(max_value=1.0)))
+
+ # Fully connected layers
+ model.add(layers.Flatten())
+ model.add(MemristorDense(1024, num_hidden_neurons, iterator))
+ model.add(layers.Activation("sigmoid"))
+ model.add(MemristorDense(num_hidden_neurons, 10, iterator))
+ model.add(layers.Activation("softmax"))
+ else:
+ raise ValueError(f"Dataset {iterator.dataset} is not recognised!")
+
+ if custom_weights is not None:
+ model.set_weights(custom_weights)
+ elif custom_weights_path is not None:
+ model.load_weights(custom_weights_path)
+ elif not iterator.is_training:
+ model.load_weights(iterator.weights_path())
+
+ model.compile(
+ optimizer=tf.keras.optimizers.SGD(),
+ loss=tf.keras.losses.SparseCategoricalCrossentropy(),
+ metrics=["accuracy"],
+ )
+ return model
+
+
+class MemristorDense(layers.Layer):
+ def __init__(self, n_in, n_out, iterator, **kwargs):
+ self.n_in = n_in
+ self.n_out = n_out
+ self.iterator = iterator
+ super(MemristorDense, self).__init__(**kwargs)
+
+    # Implementing get_config() avoids errors when checkpointing this custom layer.
+ def get_config(self):
+ config = super().get_config().copy()
+ config.update(
+ {
+ "n_in": self.n_in,
+ "n_out": self.n_out,
+ }
+ )
+ return config
+
+ # Create trainable weights and biases
+ def build(self, input_shape):
+ stdv = 1 / np.sqrt(self.n_in)
+
+ kwargs = {}
+ if self.iterator.training.is_regularized:
+ reg_gamma = 1e-4
+ kwargs["regularizer"] = tf.keras.regularizers.l1(reg_gamma)
+
+ if self.iterator.training.uses_double_weights():
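+            # Each synaptic weight is represented by a pair of non-negative
+            # components (effectively w = w_pos - w_neg), each of which is
+            # later mapped onto its own conductance.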
+ self.w_pos = self.add_weight(
+ shape=(self.n_in, self.n_out),
+ initializer=tf.keras.initializers.RandomNormal(mean=0.5, stddev=stdv),
+ name="weights_pos",
+ trainable=True,
+ constraint=constraints.NonNeg(),
+ **kwargs,
+ )
+
+ self.w_neg = self.add_weight(
+ shape=(self.n_in, self.n_out),
+ initializer=tf.keras.initializers.RandomNormal(mean=0.5, stddev=stdv),
+ name="weights_neg",
+ trainable=True,
+ constraint=constraints.NonNeg(),
+ **kwargs,
+ )
+
+ self.b_pos = self.add_weight(
+ shape=(self.n_out,),
+ initializer=tf.keras.initializers.Constant(value=0.5),
+ name="biases_pos",
+ trainable=True,
+ constraint=constraints.NonNeg(),
+ **kwargs,
+ )
+
+ self.b_neg = self.add_weight(
+ shape=(self.n_out,),
+ initializer=tf.keras.initializers.Constant(value=0.5),
+ name="biases_neg",
+ trainable=True,
+ constraint=constraints.NonNeg(),
+ **kwargs,
+ )
+ else:
+ self.w = self.add_weight(
+ shape=(self.n_in, self.n_out),
+ initializer=tf.keras.initializers.RandomNormal(mean=0.0, stddev=stdv),
+ name="weights",
+ trainable=True,
+ **kwargs,
+ )
+
+ self.b = self.add_weight(
+ shape=(self.n_out,),
+ initializer=tf.keras.initializers.Constant(value=0.0),
+ name="biases",
+ trainable=True,
+ **kwargs,
+ )
+
+ def combined_weights(self):
+ if self.iterator.training.uses_double_weights():
+ b_pos = tf.expand_dims(self.b_pos, axis=0)
+ b_neg = tf.expand_dims(self.b_neg, axis=0)
+ combined_weights_pos = tf.concat([self.w_pos, b_pos], 0)
+ combined_weights_neg = tf.concat([self.w_neg, b_neg], 0)
+
+ # Interleave positive and negative weights
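+            # so that even-indexed columns hold the positive components and
+            # odd-indexed columns the negative ones.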
+ combined_weights = tf.reshape(
+ tf.concat(
+ [
+ combined_weights_pos[..., tf.newaxis],
+ combined_weights_neg[..., tf.newaxis],
+ ],
+ axis=-1,
+ ),
+ [tf.shape(combined_weights_pos)[0], -1],
+ )
+ else:
+ bias = tf.expand_dims(self.b, axis=0)
+ combined_weights = tf.concat([self.w, bias], 0)
+
+ return combined_weights
+
+ def call(self, x, mask=None):
+ if (
+ not self.iterator.training.uses_double_weights()
+ and not self.iterator.current_stage().is_nonideal()
+ ):
+ return tf.tensordot(x, self.w, axes=1) + self.b
+
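+        # Append a column of ones so that the bias row of the combined
+        # weights is applied to every example.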
+ ones = tf.ones([tf.shape(x)[0], 1])
+ inputs = tf.concat([x, ones], 1)
+
+ self.out = self.memristive_outputs(inputs, self.combined_weights())
+
+ return self.out
+
+ def memristive_outputs(self, x, weights):
+ current_stage = self.iterator.current_stage()
+
+ if current_stage.is_nonideal():
+ G_off = current_stage.G_off
+ G_on = current_stage.G_on
+ k_V = current_stage.k_V()
+ else:
+ # Handle case when training is aware, but inference assumes no
+ # nonidealities. This will not affect accuracy, but will affect
+ # power consumption.
+ G_off = self.iterator.training.G_off
+ G_on = self.iterator.training.G_on
+ k_V = self.iterator.training.k_V()
+
+ # Mapping inputs onto voltages.
+ V = crossbar.map.x_to_V(x, k_V)
+
+ # Mapping weights onto conductances.
+ if self.iterator.training.uses_double_weights():
+ G, max_weight = crossbar.map.double_w_to_G(weights, G_off, G_on)
+ else:
+ G, max_weight = crossbar.map.w_to_G(weights, G_off, G_on, current_stage.mapping_rule)
+
+ # Linearity-preserving nonidealities
+ for nonideality in current_stage.nonidealities:
+ if isinstance(nonideality, crossbar.nonidealities.LinearityPreserving):
+ G = nonideality.disturb_G(G)
+
+ # Linearity-nonpreserving nonidealities
+ I = None
+ I_ind = None
+ for nonideality in current_stage.nonidealities:
+ if isinstance(nonideality, crossbar.nonidealities.LinearityNonpreserving):
+ I, I_ind = nonideality.compute_I(V, G)
+
+ # Ideal case for computing output currents.
+ if I is None or I_ind is None:
+ if self.iterator.is_training:
+ I = crossbar.ideal.compute_I(V, G)
+ else:
+ I, I_ind = crossbar.ideal.compute_I_all(V, G)
+
+ if self.iterator.compute_power:
+ power_path = self.iterator.power_path()
+ P_avg = utils.compute_avg_crossbar_power(V, I_ind)
+ with open(power_path, mode="a", encoding="utf-8"):
+ tf.print(P_avg, output_stream=f"file://{power_path}")
+
+ # Converting to outputs.
+ y_disturbed = crossbar.map.I_to_y(I, k_V, max_weight, G_on, G_off)
+
+ return y_disturbed
+
+ def get_output_shape_for(self, input_shape):
+ return (input_shape[0], self.n_out)
+
+ def compute_output_shape(self, input_shape):
+ return (input_shape[0], self.n_out)
diff --git a/awarememristor/training/callbacks.py b/awarememristor/training/callbacks.py
new file mode 100644
index 0000000..fe7ff15
--- /dev/null
+++ b/awarememristor/training/callbacks.py
@@ -0,0 +1,310 @@
+import copy
+import os
+import time
+from abc import ABC, abstractmethod
+from typing import Any
+
+import numpy as np
+import tensorflow as tf
+
+from awarememristor.training import architecture
+
+
+class Callback(ABC):
+ """Abstract class that requires implementation of callback name."""
+
+ @staticmethod
+ @abstractmethod
+ def name() -> str:
+ """Returns name of the callback."""
+
+
+class Checkpoint:
+ """Used only to mark certain callbacks as checkpoint callbacks."""
+
+
+class MemristiveCallback(tf.keras.callbacks.Callback):
+ """Computes a metric multiple times in order to take the stochastic nature
+ of memristive devices into account."""
+
+ def __init__(self, iterator, history=None) -> None:
+ self.iterator = copy.copy(iterator)
+ self.validation_freq = 20
+ if iterator.training.memristive_validation_freq is not None:
+ self.validation_freq = iterator.training.memristive_validation_freq
+ self.testing_freq = 20
+ self.num_repeats = 20
+ self.history = history
+
+ def should_skip_epoch(self, epoch, is_validation=False) -> bool:
+ freq = self.testing_freq
+ if is_validation:
+ freq = self.validation_freq
+ # Will evaluate on first epoch and then every `freq` epochs.
+ if epoch != 0 and (epoch + 1) % freq != 0:
+ return True
+ return False
+
+ def evaluate(self, model, data, num_repeats: int = None):
+ if num_repeats is None:
+ num_repeats = self.num_repeats
+
+ accuracy = []
+ loss = []
+
+ start_time = time.time()
+ for _ in range(num_repeats):
+ single_loss, single_accuracy = model.evaluate(data, verbose=0)
+ loss.append(single_loss)
+ accuracy.append(single_accuracy)
+ num_total_batches = data.cardinality().numpy() * num_repeats
+
+ end_time = time.time()
+ duration = int(end_time - start_time)
+ if num_repeats == 1:
+ loss = loss[0]
+ accuracy = accuracy[0]
+
+ return loss, accuracy, duration, num_total_batches
+
+ def evaluation_results_str(
+ self,
+ num_total_batches: int,
+ duration: int,
+ loss: float,
+ accuracy: float,
+ prepend_metrics: str = None,
+ label: str = None,
+ ) -> str:
+ str_ = f"{num_total_batches}/{num_total_batches}"
+ str_ += f" - {duration}s - "
+ if prepend_metrics:
+ str_ += f"{prepend_metrics}_"
+ str_ += f"loss: {loss:.4f} - "
+ if prepend_metrics:
+ str_ += f"{prepend_metrics}_"
+ str_ += f"accuracy: {accuracy:.4f}"
+ if label is not None:
+ str_ += f" [{label}]"
+
+ return str_
+
+ def saving_weights_str(
+ self, accuracy: float, previous_best_accuracy: float, prepend: str = None
+ ) -> str:
+ str_ = ""
+ if prepend is not None:
+ str_ += f"{prepend}_"
+ str_ += f"accuracy ({accuracy:.4f}) improved over previous best result ({previous_best_accuracy:.4f}). Saving weights..."
+ return str_
+
+ def info(self) -> dict[str, Any]:
+ return {
+ "history": self.history,
+ }
+
+
+class TestCallback(MemristiveCallback, Callback):
+ """Compute test accuracy for all inference setups during training."""
+
+ def __init__(self, iterator) -> None:
+ MemristiveCallback.__init__(self, iterator)
+ self.iterator.is_training = False
+ self.history = [
+ {
+ "label": inference.label(),
+ "epoch_no": [],
+ "loss": [],
+ "accuracy": [],
+ }
+ for inference in self.iterator.inferences
+ ]
+
+ def on_epoch_end(self, epoch, logs=None):
+ if self.should_skip_epoch(epoch, is_validation=False):
+ return
+
+ model_weights = self.model.get_weights()
+
+ for inference_idx, inference in enumerate(self.iterator.inferences):
+ self.iterator.inference_idx = inference_idx
+ data = self.iterator.data("testing")
+ callback_model = architecture.get_model(self.iterator, custom_weights=model_weights)
+ loss, accuracy, duration, num_total_batches = self.evaluate(callback_model, data)
+ self.history[inference_idx]["loss"].append(loss)
+ self.history[inference_idx]["accuracy"].append(accuracy)
+ self.history[inference_idx]["epoch_no"].append(epoch + 1)
+ results_str = self.evaluation_results_str(
+ num_total_batches,
+ duration,
+ np.median(loss),
+ np.median(accuracy),
+ prepend_metrics="median_test",
+ label=inference.nonideality_label(),
+ )
+ print(results_str)
+
+ self.iterator.inference_idx = 0
+
+ @staticmethod
+ def name():
+ return "memristive_test"
+
+
+class MemristiveCheckpoint(MemristiveCallback, Callback, Checkpoint):
+ """Evaluate accuracy on the validation set multiple times to provide a more
+ reliable measure of learning progress.
+ """
+
+ def __init__(self, iterator) -> None:
+ MemristiveCallback.__init__(self, iterator)
+ self.iterator.is_training = True
+ self.best_median_val_accuracy = 0.0
+ self.history = {"epoch_no": [], "loss": [], "accuracy": []}
+
+ def on_epoch_end(self, epoch, logs=None):
+ if self.should_skip_epoch(epoch, is_validation=True):
+ return
+
+ data = self.iterator.data("validation")
+ loss, accuracy, duration, num_total_batches = self.evaluate(self.model, data)
+
+ self.history["loss"].append(loss)
+ self.history["accuracy"].append(accuracy)
+ self.history["epoch_no"].append(epoch + 1)
+
+ median_val_loss = np.median(loss)
+ median_val_accuracy = np.median(accuracy)
+ print(
+ self.evaluation_results_str(
+ num_total_batches,
+ duration,
+ median_val_loss,
+ median_val_accuracy,
+ prepend_metrics="median_val",
+ )
+ )
+
+ if median_val_accuracy > self.best_median_val_accuracy:
+ print(
+ self.saving_weights_str(
+ median_val_accuracy, self.best_median_val_accuracy, prepend="median_val"
+ )
+ )
+ self.best_median_val_accuracy = median_val_accuracy
+ self.model.save_weights(self.iterator.weights_path())
+
+ @staticmethod
+ def name():
+ return "memristive_checkpoint"
+
+
+class StandardCheckpoint(tf.keras.callbacks.ModelCheckpoint, Callback, Checkpoint):
+ """Same as `tf.keras.callbacks.ModelCheckpoint`, but with a `name()`."""
+
+ def __init__(self, iterator) -> None:
+ tf.keras.callbacks.ModelCheckpoint.__init__(
+ self,
+ iterator.weights_path(),
+ monitor="val_accuracy",
+ save_best_only=True,
+ )
+
+ @staticmethod
+ def name():
+ return "standard_checkpoint"
+
+
+class CombinedCheckpoint(MemristiveCallback, Callback, Checkpoint):
+ """Used to test the effectiveness of memristive validation.
+
+ Two validation techniques (standard and memristive) are applied at the same
+ time during training.
+ """
+
+ def __init__(self, iterator) -> None:
+ MemristiveCallback.__init__(self, iterator)
+ self.iterator.is_training = True
+ self.best_median_val_accuracy = 0.0
+ self.best_standard_val_accuracy = 0.0
+ self.history = {
+ "epoch_no": [],
+ "loss": [],
+ "accuracy": [],
+ "standard_epoch_no": [],
+ "standard_loss": [],
+ "standard_accuracy": [],
+ }
+
+ def on_epoch_end(self, epoch, logs=None):
+ data = self.iterator.data("validation")
+
+ if self.should_skip_epoch(epoch, is_validation=True):
+ single_loss, single_accuracy, duration, num_total_batches = self.evaluate(
+ self.model,
+ data,
+ num_repeats=1,
+ )
+ self.history["standard_loss"].append(single_loss)
+ self.history["standard_accuracy"].append(single_accuracy)
+ self.history["standard_epoch_no"].append(epoch + 1)
+ print(
+ self.evaluation_results_str(
+ num_total_batches, duration, single_loss, single_accuracy, prepend_metrics="val"
+ )
+ )
+ else:
+ loss, accuracy, duration, num_total_batches = self.evaluate(self.model, data)
+
+ self.history["loss"].append(loss)
+ self.history["accuracy"].append(accuracy)
+ self.history["epoch_no"].append(epoch + 1)
+
+ median_val_loss = np.median(loss)
+ median_val_accuracy = np.median(accuracy)
+ print(
+ self.evaluation_results_str(
+ num_total_batches,
+ duration,
+ median_val_loss,
+ median_val_accuracy,
+ prepend_metrics="median_val",
+ )
+ )
+
+ single_loss = loss[0]
+ single_accuracy = accuracy[0]
+ self.history["standard_loss"].append(single_loss)
+ self.history["standard_accuracy"].append(single_accuracy)
+ self.history["standard_epoch_no"].append(epoch + 1)
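+ # Batch count and duration are reported as zeros because this
+ # result reuses the repeated evaluation above.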
+ print(
+ self.evaluation_results_str(
+ 0, 0, single_loss, single_accuracy, prepend_metrics="val"
+ )
+ )
+
+ if median_val_accuracy > self.best_median_val_accuracy:
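+ # Selects the "memristive-validation" subdirectory via weights_dir().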
+ self.iterator.training.is_standard_validation_mode = False
+ os.makedirs(self.iterator.weights_dir(), exist_ok=True)
+ print(
+ self.saving_weights_str(
+ median_val_accuracy, self.best_median_val_accuracy, prepend="median_val"
+ )
+ )
+ self.best_median_val_accuracy = median_val_accuracy
+ self.model.save_weights(self.iterator.weights_path())
+
+ if single_accuracy > self.best_standard_val_accuracy:
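+ # Selects the "standard-validation" subdirectory via weights_dir().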
+ self.iterator.training.is_standard_validation_mode = True
+ os.makedirs(self.iterator.weights_dir(), exist_ok=True)
+ print(
+ self.saving_weights_str(
+ single_accuracy, self.best_standard_val_accuracy, prepend="val"
+ )
+ )
+ self.best_standard_val_accuracy = single_accuracy
+ self.model.save_weights(self.iterator.weights_path())
+
+ @staticmethod
+ def name():
+ return "combined_checkpoint"
diff --git a/awarememristor/training/iterator.py b/awarememristor/training/iterator.py
new file mode 100644
index 0000000..e2073c5
--- /dev/null
+++ b/awarememristor/training/iterator.py
@@ -0,0 +1,498 @@
+import os
+import pickle
+import warnings
+from typing import Any
+
+import numpy as np
+import tensorflow as tf
+import tensorflow_datasets as tfds
+
+from awarememristor.crossbar.nonidealities import (LinearityNonpreserving,
+ LinearityPreserving,
+ Nonideality)
+from awarememristor.simulations import devices
+from awarememristor.training import callbacks, network, utils
+
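+# Make sure warnings (e.g. the "Skipping..." notices in Iterator) are not silenced.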
+warnings.simplefilter("default")
+
+
+class Iterable:
+ """Used to keep track of training or inference repeats."""
+
+ def __init__(self, num_repeats: int) -> None:
+ self.repeat_idx = 0
+ self.num_repeats = num_repeats
+
+ def __eq__(self, other):
+ return self.repeat_idx == other.repeat_idx and self.num_repeats == other.num_repeats
+
+
+class Stage(Iterable):
+ """Used for training and inference."""
+
+ def __init__(
+ self,
+ G_off: float = None,
+ G_on: float = None,
+ nonidealities: list[Nonideality] = None,
+ mapping_rule: str = "default",
+ num_repeats: int = 0,
+ ) -> None:
+ self.G_off = G_off
+ self.G_on = G_on
+ if nonidealities is None:
+ nonidealities = []
+ self.nonidealities = nonidealities
+ self.mapping_rule = mapping_rule
+ self.validate_nonidealities()
+ Iterable.__init__(self, num_repeats)
+
+ def __eq__(self, other):
+ return (
+ self.G_off == other.G_off
+ and self.G_on == other.G_on
+ and self.nonidealities == other.nonidealities
+ and self.mapping_rule == other.mapping_rule
+ and Iterable.__eq__(self, other)
+ )
+
+ def conductance_label(self) -> str:
+ if self.G_off is None and self.G_on is None:
+ return "none_none"
+
+ return f"{self.G_off:.3g}_{self.G_on:.3g}"
+
+ def nonideality_label(self) -> str:
+ if len(self.nonidealities) == 0:
+ return "ideal"
+
+ l = "+".join(nonideality.label() for nonideality in self.nonidealities)
+ if self.mapping_rule != "default":
+ l += f"__{self.mapping_rule}"
+ return l
+
+ def label(self) -> str:
+ return f"{self.conductance_label()}__{self.nonideality_label()}"
+
+ def is_nonideal(self) -> bool:
+ return len(self.nonidealities) > 0
+
+ def k_V(self) -> float:
+ for nonideality in self.nonidealities:
+ if isinstance(nonideality, LinearityNonpreserving):
+ return nonideality.k_V()
+
+ # Except for power consumption, `k_V` makes no difference for
+ # linearity-preserving nonidealities, thus using the same value as for
+ # SiO_x devices.
+ return 2 * devices.SiO_x_V_ref()["V_ref"]
+
+ def validate_nonidealities(self) -> None:
+ """At most one linearity-preserving and one linearity-nonpreserving
+ nonideality are currently supported; this method enforces that
+ constraint."""
+ num_linearity_preserving = 0
+ num_linearity_nonpreserving = 0
+ for nonideality in self.nonidealities:
+ if isinstance(nonideality, LinearityPreserving):
+ num_linearity_preserving += 1
+ elif isinstance(nonideality, LinearityNonpreserving):
+ num_linearity_nonpreserving += 1
+
+ for num, nonideality_type in zip(
+ [num_linearity_preserving, num_linearity_nonpreserving],
+ ["linearity-preserving", "linearity-nonpreserving"],
+ ):
+ if num > 1:
+ raise ValueError(
+ f"Current implementation does not support more than one {nonideality_type} nonideality."
+ )
+
+
+class Training(Stage, Iterable):
+ """Training configuration."""
+
+ def __init__(
+ self,
+ batch_size: int = 1,
+ validation_split: float = 0.2,
+ num_epochs: int = 1,
+ is_regularized: bool = False,
+ num_repeats: int = 0,
+ G_off: float = None,
+ G_on: float = None,
+ nonidealities: list[Nonideality] = None,
+ use_combined_validation: bool = False,
+ memristive_validation_freq: int = None,
+ mapping_rule: str = "default",
+ force_standard_w: bool = False,
+ ) -> None:
+ self.batch_size = batch_size
+ self.num_epochs = num_epochs
+ self.is_regularized = is_regularized
+ self.validation_split = validation_split
+ self.use_combined_validation = use_combined_validation
+ self.is_standard_validation_mode = False
+ self.memristive_validation_freq = memristive_validation_freq
+ self.force_standard_w = force_standard_w
+ Stage.__init__(
+ self,
+ G_off=G_off,
+ G_on=G_on,
+ nonidealities=nonidealities,
+ mapping_rule=mapping_rule,
+ num_repeats=num_repeats,
+ )
+
+ def regularized_label(self) -> str:
+ if self.is_regularized:
+ return "reg"
+ else:
+ return "nonreg"
+
+ def label(self) -> str:
+ l = f"{self.regularized_label()}__{self.batch_size}__{Stage.label(self)}"
+ if self.memristive_validation_freq is not None:
+ l += f"__val_freq_{self.memristive_validation_freq}"
+ if self.force_standard_w:
+ l += "__standard_w"
+ return l
+
+ def network_label(self) -> str:
+ return f"network-{self.repeat_idx}"
+
+ def uses_double_weights(self) -> bool:
+ return self.is_nonideal() and not self.force_standard_w
+
+
+class Inference(Stage):
+ """Inference configuration."""
+
+ def __init__(
+ self,
+ num_repeats: int = 0,
+ G_off: float = None,
+ G_on: float = None,
+ nonidealities: list[Nonideality] = None,
+ mapping_rule: str = "default",
+ ) -> None:
+ Stage.__init__(
+ self,
+ G_off=G_off,
+ G_on=G_on,
+ nonidealities=nonidealities,
+ mapping_rule=mapping_rule,
+ num_repeats=num_repeats,
+ )
+
+ def repeat_label(self) -> str:
+ return f"repeat-{self.repeat_idx}"
+
+
+class Iterator:
+ """A helper class used in simulations of memristive neural networks.
+
+ It is used to combine different training and inference setups.
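+
+ A minimal usage sketch (hypothetical parameter values; any dataset name
+ accepted by `tfds.load` works):
+
+ >>> training = Training(batch_size=64, num_epochs=10, num_repeats=2)
+ >>> inferences = [Inference(num_repeats=5)]
+ >>> iterator = Iterator("mnist", training, inferences)
+ >>> iterator.train(use_test_callback=True)
+ >>> iterator.infer()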
+ """
+
+ def __init__(self, dataset: str, training: Training, inferences: list[Inference]) -> None:
+ self.dataset = dataset
+ self.training = training
+ self.inferences = inferences
+ self.compute_power = False
+ self.is_training = False
+ self.inference_idx = 0
+ self.test_batch_size = 100 # divisor of the size of the test set
+ self.__training_data = None
+ self.__validation_data = None
+ self.__testing_data = None
+ self.__train_split_boundary = int(100 * (1 - self.training.validation_split))
+
+ def data(self, subset: str) -> tf.data.Dataset:
+ if subset == "training":
+ if self.__training_data is not None:
+ return self.__training_data
+ split = f"train[:{self.__train_split_boundary}%]"
+ elif subset == "validation":
+ if self.__validation_data is not None:
+ return self.__validation_data
+ split = f"train[{self.__train_split_boundary}%:]"
+ elif subset == "testing":
+ if self.__testing_data is not None:
+ return self.__testing_data
+ split = "test"
+ else:
+ raise ValueError(f'Subset "{subset}" is not recognised!')
+
+ ds = tfds.load(
+ self.dataset,
+ split=split,
+ as_supervised=True,
+ shuffle_files=True,
+ )
+ size = ds.cardinality().numpy()
+
+ ds = ds.map(utils.normalize_img, num_parallel_calls=tf.data.AUTOTUNE)
+ if subset == "testing":
+ ds = ds.batch(self.test_batch_size)
+ ds = ds.cache()
+ else:
+ ds = ds.cache()
+ ds = ds.shuffle(size)
+ ds = ds.batch(self.training.batch_size)
+ if self.dataset == "cifar10" and subset == "training":
+ data_augmentation = tf.keras.Sequential(
+ [
+ tf.keras.layers.RandomTranslation(0.1, 0.1),
+ tf.keras.layers.RandomFlip("horizontal"),
+ ]
+ )
+ ds = ds.map(
+ lambda x, y: (data_augmentation(x, training=True), y),
+ num_parallel_calls=tf.data.AUTOTUNE,
+ )
+ ds = ds.prefetch(tf.data.AUTOTUNE)
+
+ if subset == "training":
+ self.__training_data = ds
+ elif subset == "validation":
+ self.__validation_data = ds
+ elif subset == "testing":
+ self.__testing_data = ds
+
+ print(f'Loaded dataset "{self.dataset}" ({subset}): {size} examples.')
+
+ return ds
+
+ def training_dir(self) -> str:
+ return os.path.join(os.getcwd(), "models", self.dataset, self.training.label())
+
+ def network_dir(self) -> str:
+ return os.path.join(self.training_dir(), self.training.network_label())
+
+ def weights_dir(self) -> str:
+ if self.training.use_combined_validation:
+ if self.training.is_standard_validation_mode:
+ return os.path.join(self.network_dir(), "standard-validation")
+ return os.path.join(self.network_dir(), "memristive-validation")
+ return self.network_dir()
+
+ def weights_path(self) -> str:
+ filename = "model.h5"
+ return os.path.join(self.weights_dir(), filename)
+
+ def info_path(self) -> str:
+ return os.path.join(self.network_dir(), "info.pkl")
+
+ def inference_nonideality_dir(self) -> str:
+ return os.path.join(self.weights_dir(), self.inferences[self.inference_idx].label())
+
+ def inference_repeat_dir(self) -> str:
+ return os.path.join(
+ self.inference_nonideality_dir(),
+ self.inferences[self.inference_idx].repeat_label(),
+ )
+
+ def power_path(self) -> str:
+ return os.path.join(self.inference_repeat_dir(), "power.csv")
+
+ def loss_path(self) -> str:
+ return os.path.join(self.inference_repeat_dir(), "loss.csv")
+
+ def accuracy_path(self) -> str:
+ return os.path.join(self.inference_repeat_dir(), "accuracy.csv")
+
+ def info(self) -> dict[str, Any]:
+ with open(self.info_path(), "rb") as pickle_file:
+ return pickle.load(pickle_file)
+
+ def current_stage(self) -> Stage:
+ if self.is_training:
+ return self.training
+ return self.inferences[self.inference_idx]
+
+ def training_curves(self, metric: str) -> tuple[np.ndarray, np.ndarray]:
+ if metric == "error":
+ y = self.info()["history"]["accuracy"]
+ else:
+ y = self.info()["history"][metric]
+
+ num_epochs = len(y)
+ x = np.arange(1, num_epochs + 1)
+
+ y = np.array(y)
+ if metric == "error":
+ y = 1 - y
+
+ return x, y
+
+ def _checkpoint_from_info(self) -> str:
+ if not self.training.is_nonideal():
+ return callbacks.StandardCheckpoint.name()
+ if self.training.use_combined_validation:
+ return callbacks.CombinedCheckpoint.name()
+ return callbacks.MemristiveCheckpoint.name()
+
+ def validation_curves(self, metric: str) -> tuple[np.ndarray, np.ndarray]:
+ checkpoint_name = self._checkpoint_from_info()
+ info = self.info()
+ if checkpoint_name == callbacks.StandardCheckpoint.name():
+ if metric == "error":
+ y = info["history"]["val_accuracy"]
+ else:
+ y = info["history"]["val_" + metric]
+ num_epochs = len(y)
+ x = np.arange(1, num_epochs + 1)
+ elif checkpoint_name == callbacks.MemristiveCheckpoint.name():
+ history = info["callback_infos"][callbacks.MemristiveCheckpoint.name()]["history"]
+ x = history["epoch_no"]
+ x = np.array(x)
+ if metric == "error":
+ y = history["accuracy"]
+ else:
+ y = history[metric]
+ elif checkpoint_name == callbacks.CombinedCheckpoint.name():
+ prepend = ""
+ if self.training.is_standard_validation_mode:
+ prepend = "standard_"
+ history = info["callback_infos"][callbacks.CombinedCheckpoint.name()]["history"]
+ x = history[f"{prepend}epoch_no"]
+ x = np.array(x)
+ if metric == "error":
+ y = history[f"{prepend}accuracy"]
+ else:
+ y = history[f"{prepend}{metric}"]
+
+ y = np.array(y)
+ if metric == "error":
+ y = 1 - y
+
+ return x, y
+
+ def training_testing_curves(self, metric: str, inference: Inference):
+ """Data from test callbacks during training."""
+
+ history = self.info()["callback_infos"]["memristive_test"]["history"][
+ self._memristive_test_callback_idx(inference)
+ ]
+
+ if metric == "error":
+ y = history["accuracy"]
+ else:
+ y = history[metric]
+
+ x = np.array(history["epoch_no"])
+
+ y = np.array(y)
+ if metric == "error":
+ y = 1 - y
+
+ return x, y
+
+ def _memristive_test_callback_idx(self, inference: Inference) -> int:
+ """Find the history entry matching `inference` by label; the number of
+ inferences might not equal the number of memristive test callback histories."""
+ label = inference.label()
+ for idx, history in enumerate(self.info()["callback_infos"]["memristive_test"]["history"]):
+ try:
+ if history["label"] == label:
+ return idx
+ except KeyError:
+ break
+
+ raise ValueError("Index not found.")
+
+ def _test_metric_existing(self, inference_idx: int, metric: str = "accuracy") -> np.ndarray:
+ """Return test metric for which we already have data."""
+ self.inference_idx = inference_idx
+ inference = self.inferences[self.inference_idx]
+ y = np.zeros((self.training.num_repeats, inference.num_repeats))
+
+ initial_training_repeat_idx = self.training.repeat_idx
+ self.training.repeat_idx = 0
+ for i in range(self.training.num_repeats):
+ inference.repeat_idx = 0
+ for j in range(inference.num_repeats):
+ if metric == "accuracy":
+ filename = self.accuracy_path()
+ elif metric == "loss":
+ filename = self.loss_path()
+ elif metric == "avg_power":
+ filename = self.power_path()
+ val = np.genfromtxt(filename)
+ if metric == "avg_power":
+ val = np.mean(val)
+ # Two synaptic layers.
+ val = 2 * val
+
+ y[i, j] = val
+
+ inference.repeat_idx += 1
+
+ self.training.repeat_idx += 1
+
+ self.training.repeat_idx = initial_training_repeat_idx
+ self.inference_idx = 0
+ return y
+
+ def test_metric(self, metric: str, inference_idx: int = 0) -> np.ndarray:
+ if metric == "error":
+ values = self._test_metric_existing(inference_idx, metric="accuracy")
+ else:
+ values = self._test_metric_existing(inference_idx, metric=metric)
+
+ if metric == "error":
+ values = 1 - values
+
+ return values
+
+ def train(self, use_test_callback: bool = False) -> None:
+ self.is_training = True
+
+ for _ in range(self.training.num_repeats):
+ if os.path.isdir(self.network_dir()):
+ warnings.warn(
+ f'Training directory "{self.network_dir()}" already exists. Skipping...'
+ )
+ self.training.repeat_idx += 1
+ continue
+ # New callbacks in each iteration because iterator changes.
+ train_callbacks: list[callbacks.Callback] = []
+
+ if use_test_callback:
+ train_callbacks.append(callbacks.TestCallback(self))
+
+ if self.training.use_combined_validation:
+ train_callbacks.append(callbacks.CombinedCheckpoint(self))
+ elif not self.training.is_nonideal():
+ train_callbacks.append(callbacks.StandardCheckpoint(self))
+ else:
+ train_callbacks.append(callbacks.MemristiveCheckpoint(self))
+
+ network.train(self, train_callbacks)
+ self.training.repeat_idx += 1
+
+ self.training.repeat_idx = 0
+
+ def infer(self) -> None:
+ self.is_training = False
+ self.compute_power = True
+ for idx in range(len(self.inferences)):
+ self.inference_idx = idx
+ if os.path.isdir(self.inference_nonideality_dir()):
+ warnings.warn(
+ f'Inference directory "{self.inference_nonideality_dir()}" already exists. Skipping...'
+ )
+ continue
+ inference = self.inferences[self.inference_idx]
+ for _ in range(self.training.num_repeats):
+ for _ in range(inference.num_repeats):
+ network.infer(self)
+ inference.repeat_idx += 1
+
+ inference.repeat_idx = 0
+ self.training.repeat_idx += 1
+
+ self.training.repeat_idx = 0
+ self.inference_idx = 0
+ self.compute_power = False
diff --git a/awarememristor/training/network.py b/awarememristor/training/network.py
new file mode 100644
index 0000000..d8243df
--- /dev/null
+++ b/awarememristor/training/network.py
@@ -0,0 +1,63 @@
+import os
+import pickle
+
+import tensorflow as tf
+
+import awarememristor.training.callbacks as callbacks_
+from awarememristor.training.architecture import get_model
+
+
+def train(iterator, callbacks: list[callbacks_.Callback]) -> None:
+ """Trains using iterator settings and saves information once done."""
+ os.makedirs(iterator.weights_dir(), exist_ok=True)
+
+ validation_data = None
+ num_checkpoint_callbacks = 0
+ for callback in callbacks:
+ if isinstance(callback, callbacks_.Checkpoint):
+ num_checkpoint_callbacks += 1
+ if isinstance(callback, callbacks_.StandardCheckpoint):
+ validation_data = iterator.data("validation")
+
+ if num_checkpoint_callbacks != 1:
+ raise ValueError("Exactly one checkpoint callback must be supplied during training!")
+
+ model = get_model(iterator)
+
+ history = model.fit(
+ iterator.data("training"),
+ validation_data=validation_data,
+ verbose=2,
+ epochs=iterator.training.num_epochs,
+ callbacks=callbacks,
+ )
+
+ info = {
+ "history": history.history,
+ "validation_split": iterator.training.validation_split,
+ "batch_size": iterator.training.batch_size,
+ "callback_infos": {},
+ }
+ for callback in callbacks:
+ if isinstance(callback, callbacks_.MemristiveCallback):
+ if callback.name() in info["callback_infos"]:
+ raise KeyError(f'Callback "{callback.name()}" already exists!')
+ info["callback_infos"][callback.name()] = callback.info()
+
+ with open(iterator.info_path(), "wb") as handle:
+ pickle.dump(info, handle)
+
+
+def infer(iterator) -> None:
+ """Performs inference using iterator settings and saves metrics to separate files."""
+ os.makedirs(iterator.inference_repeat_dir(), exist_ok=True)
+
+ model = get_model(iterator)
+
+ score = model.evaluate(iterator.data("testing"), verbose=0)
+
+ print(f"Test loss: {score[0]:.4f}\nTest accuracy: {score[1]:.4f}")
+
+ for var, path in zip(score, [iterator.loss_path(), iterator.accuracy_path()]):
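+ # Opening in append mode ensures each file exists; tf.print() appends
+ # the metric via its file:// output stream.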
+ with open(path, mode="a", encoding="utf-8"):
+ tf.print(var, output_stream=f"file://{path}")
diff --git a/awarememristor/training/utils.py b/awarememristor/training/utils.py
new file mode 100644
index 0000000..f91c429
--- /dev/null
+++ b/awarememristor/training/utils.py
@@ -0,0 +1,43 @@
+import tensorflow as tf
+
+
+def _compute_device_power(V: tf.Tensor, I_ind: tf.Tensor) -> tf.Tensor:
+ """Compute power dissipated by individual devices in a crossbar.
+
+ Args:
+ V: Voltages in shape `p x m` with `p` examples applied across `m` word lines.
+ I_ind: Currents in shape `p x m x n` generated by the individual devices in crossbar with
+ `m` word lines and `n` bit lines.
+
+ Returns:
+ Power in shape `p x m x n`.
+ """
+ # $P = |V| |I|$ for individual devices. All devices in the same word line
+ # of the crossbar (row of G) are applied with the same voltage.
+ P_ind = tf.einsum("ij,ijk->ijk", tf.math.abs(V), tf.math.abs(I_ind))
+
+ return P_ind
+
+
+def compute_avg_crossbar_power(V: tf.Tensor, I_ind: tf.Tensor) -> tf.Tensor:
+ """Compute average power dissipated by a crossbar.
+
+ Args:
+ V: Voltages in shape `p x m` with `p` examples applied across `m` word lines.
+ I_ind: Currents in shape `p x m x n` generated by the individual devices in crossbar with
+ `m` word lines and `n` bit lines.
+
+ Returns:
+ Average power dissipated by a crossbar.
+ """
+ P = _compute_device_power(V, I_ind)
+ P_sum = tf.math.reduce_sum(P)
+ # To get average power consumption **per crossbar** we divide by number of examples.
+ P_avg = P_sum / tf.cast(tf.shape(V)[0], tf.float32)
+
+ return P_avg
+
+
+def normalize_img(image: tf.Tensor, label: tf.Tensor):
+ """Normalize images: `uint8` -> `float32`."""
+ return tf.cast(image, tf.float32) / 255.0, label
diff --git a/crossbar/__init__.py b/crossbar/__init__.py
deleted file mode 100644
index 2004578..0000000
--- a/crossbar/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-import crossbar.map
-import crossbar.nonlinear_IV
diff --git a/crossbar/map.py b/crossbar/map.py
deleted file mode 100644
index 51ba6ff..0000000
--- a/crossbar/map.py
+++ /dev/null
@@ -1,165 +0,0 @@
-import tensorflow as tf
-
-
-def I_to_y(I, k_V, max_weight, G_max, G_min):
- """Converts output currents of a dot-product engine onto synaptic layer inputs.
-
- Parameters
- ----------
- I : ndarray
- Output currents of shape `p x 2n`
- k_V : float
- Voltage scaling factor.
- max_weight : float
- Assumed maximum weight.
- G_max : float
- Maximum conductance of electroformed memristors.
- G_min : float
- Minimum conductance of electroformed memristors.
-
- Returns
- ----------
- y : ndarray
- Outputs of shape `p x n` of a synaptic layer implemented using
- memristive crossbars.
- """
- I_total = I[:, 0::2] - I[:, 1::2]
- y = I_total_to_y(I_total, k_V, max_weight, G_max, G_min)
- return y
-
-
-def I_total_to_y(I_total, k_V, max_weight, G_max, G_min):
- """Converts total output currents of a dot-product engine onto synaptic layer
- inputs.
-
- Parameters
- ----------
- I_total : ndarray
- Total output currents of shape `p x n`
- k_V : float
- Voltage scaling factor.
- max_weight : float
- Assumed maximum weight.
- G_max : float
- Maximum conductance of electroformed memristors.
- G_min : float, optional
- Minimum conductance of electroformed memristors.
-
- Returns
- ----------
- y : ndarray
- Outputs of shape `p x n` of a synaptic layer implemented using
- memristive crossbars.
- """
- k_G = compute_k_G(max_weight, G_max, G_min)
- k_I = compute_k_I(k_V, k_G)
- y = I_total/k_I
- return y
-
-
-def clip_weights(weights, max_weight):
- """Clips weights below 0 and above max_weight.
-
- Parameters
- ----------
- weights : ndarray
- Synaptic weights.
- max_weight : float
- Assumed maximum weight.
-
- Returns
- ----------
- new_weights : ndarray
- Clipped weights.
- """
- weights = tf.clip_by_value(weights, 0.0, max_weight)
-
- return weights
-
-
-def compute_k_G(max_weight, G_max, G_min):
- """Computes conductance scaling factor.
-
- Parameters
- ----------
- max_weight : float
- Assumed maximum weight.
- G_max : float
- Maximum conductance of electroformed memristors.
- G_min : float, optional
- Minimum conductance of electroformed memristors.
-
- Returns
- ----------
- float
- Conductance scaling factor.
- """
- k_G = (G_max-G_min)/max_weight
-
- return k_G
-
-
-def compute_k_I(k_V, k_G):
- """Computes current scaling factor.
-
- Parameters
- ----------
- k_V : float
- Voltage scaling factor.
- k_G : float
- Conductance scaling factor.
-
- Returns
- ----------
- float
- Current scaling factor.
- """
- return k_V*k_G
-
-
-def x_to_V(x, k_V):
- """Maps inputs (to a synaptic layer) onto voltages.
-
- Parameters
- ----------
- x : ndarray
- Synaptic inputs.
- k_V : float
- Voltage scaling factor.
-
- Returns
- ----------
- ndarray
- Voltages.
- """
- return k_V*x
-
-
-def w_params_to_G(weight_params, max_weight, G_min, G_max):
- """Maps weight parameters onto conductances.
-
- Parameters
- ----------
- weights_params : ndarray
- Weight parameters of shape `m x 2n`. These are used to
- train each conductance (instead of pair of conductances)
- directly.
- max_weight : float
- Assumed maximum weight.
- G_min : float
- Minimum conductance of electroformed memristors.
- G_max : float
- Maximum conductance of electroformed memristors.
-
- Returns
- ----------
- ndarray
- Conductances of shape `m x 2n`.
- """
- weights_params = clip_weights(weight_params, max_weight)
-
- k_G = compute_k_G(max_weight, G_max, G_min)
- G = k_G*weight_params + G_min
-
- return G
-
diff --git a/crossbar/nonlinear_IV.py b/crossbar/nonlinear_IV.py
deleted file mode 100644
index 4cbbb2f..0000000
--- a/crossbar/nonlinear_IV.py
+++ /dev/null
@@ -1,97 +0,0 @@
-import tensorflow as tf
-
-
-def compute_I(V, G, V_ref, G_min, G_max, n_avg, n_std=tf.constant(0.0)):
- """Computes output currents of a crossbar consisting of devices suffering
- from I/V non-linearities.
-
- Parameters
- ----------
- V : ndarray
- Voltages of shape `p x m`.
- G : ndarray
- Conductances of shape `m x n`.
- V_ref :
- Reference voltage values of length r (in increasing order) or voltage
- at which the devices behave Ohmically.
- G_min : float
- Minimum conductance of electroformed memristors.
- G_max : float
- Maximum conductance of electroformed memristors.
- n_avg : tf.constant
- Average value of non-linearity parameter.
- n_std: tf.constant, optional
- Standard deviation of non-linearity parameter.
-
- Returns
- ----------
- I : ndarray
- Output currents of shape `p x n`.
- I_ind : ndarray
- Currents of shape `p x m x n` produced by each of the conductances in
- the crossbar array.
- """
- I_ind = compute_currents(G_min, G_max, n_avg, V_ref, G, V, n_std=n_std)
- I = add_I_BL(I_ind)
-
- return I, I_ind
-
-
-def compute_currents(G_min, G_max, n_avg, V_ref, G, V, n_std=tf.constant(0.0)):
- """Compute current values by modelling I-V behaviour using nonlinearity
- parameter.
-
- Parameters
- ----------
- G_min : float
- Minimum conductance of electroformed memristors.
- G_max : float
- Maximum conductance of electroformed memristors.
- n_avg : tf.constant
- Average value of non-linearity parameter.
- V_ref : float
- Voltage at which the devices behave Ohmically.
- G : ndarray
- Conductances of shape `m x n`.
- V : ndarray
- Voltages of shape `p x m`.
- n_std: tf.constant, optional
- Standard deviation of non-linearity parameter.
-
- Returns
- ----------
- I : ndarray
- Currents of shape `p x m x n` produced by each of the conductances in
- the crossbar array.
- """
- epsilon = 1e-4
-
- exponent = tf.math.log((tf.math.abs(V)+epsilon)/V_ref)/tf.math.log(2.0)
-
- if n_std == tf.constant(0.0):
- n = n_avg
- I = tf.sign(tf.expand_dims(V, axis=-1)) * V_ref * tf.expand_dims(G, axis=0) * n ** (tf.expand_dims(exponent, axis=-1))
- else:
- n = tf.random.normal(G.get_shape().as_list(), mean=n_avg, stddev=n_std, dtype=tf.float32)
- I = tf.sign(tf.expand_dims(V, axis=-1)) * V_ref * tf.expand_dims(G, axis=0) * tf.expand_dims(n, axis=0) ** (tf.expand_dims(exponent, axis=-1))
-
- return I
-
-
-def add_I_BL(I_ind):
- """Adds currents along the bit lines.
-
- Parameters
- ----------
- I_ind : ndarray
- Currents of shape `p x m x n` produced by each of the conductances in
- the crossbar array.
-
- Returns
- ----------
- I : ndarray
- Output currents of shape `p x n`.
- """
- I = tf.math.reduce_sum(I_ind, axis=1)
- return I
-
diff --git a/memristor_utils.py b/memristor_utils.py
deleted file mode 100755
index 86f2327..0000000
--- a/memristor_utils.py
+++ /dev/null
@@ -1,199 +0,0 @@
-import numpy as np
-
-import tensorflow as tf
-from tensorflow.keras import backend as K
-from tensorflow.keras.layers import Layer
-
-# Import memristor non-idealities
-import crossbar
-
-
-def disturbed_outputs_i_v_non_linear(x, weights, group_idx=None, log_dir_full_path=None):
- if group_idx is None:
- group_idx = 0
-
- max_weight = tf.math.reduce_max(tf.math.abs(weights))
- V_ref = tf.constant(0.25)
-
- G_min_lst = tf.constant([1/983.3, 1/10170, 1/1401000])
- G_max_lst = tf.constant([1/281.3, 1/2826, 1/385700])
- n_avg_lst = tf.constant([2.132, 2.596, 2.986])
- n_std_lst = tf.constant([0.095, 0.088, 0.378])
-
- G_min = G_min_lst[group_idx]
- G_max = G_max_lst[group_idx]
- n_avg= n_avg_lst[group_idx]
- n_std= n_std_lst[group_idx]
-
- # Mapping weights onto conductances.
- G = crossbar.map.w_params_to_G(weights, max_weight, G_min, G_max)
-
- k_V = 2*V_ref
-
- # Mapping inputs onto voltages.
- V = crossbar.map.x_to_V(x, k_V)
-
- # Computing currents
- I, I_ind = crossbar.nonlinear_IV.compute_I(
- V, G, V_ref, G_min, G_max, n_avg, n_std=n_std)
- if log_dir_full_path is not None:
- log_file_full_path = "{}/power.csv".format(log_dir_full_path)
- open(log_file_full_path, "a").close()
- P_avg = compute_avg_crossbar_power(V, I_ind)
- tf.print(P_avg, output_stream="file://{}".format(log_file_full_path))
-
- # Converting to outputs.
- y_disturbed = crossbar.map.I_to_y(I, k_V, max_weight, G_max, G_min)
-
- tf.debugging.assert_all_finite(
- y_disturbed, "nan in outputs", name=None
- )
-
- return y_disturbed
-
-
-def compute_device_power(V, I_ind):
- """Computes power dissipated by individual devices in a crossbar.
-
- Parameters
- ----------
- V : tf.Tensor
- Voltages in shape (p x m) with p examples applied across m
- word lines.
- I_ind : tf.Tensor
- Currents in shape (p x m x n) generated by the individual
- devices in crossbar with m word lines and n bit lines.
-
- Returns
- ----------
- P_ind : tf.Tensor
- Power in shape (p x m x n).
- """
- # $P = VI$ for individual devices. All devices in the same word
- # line of the crossbar (row of G) are applied with the same voltage.
- P_ind = tf.einsum('ij,ijk->ijk', V, I_ind)
-
- return P_ind
-
-
-def compute_avg_crossbar_power(V, I_ind):
- """Computes average power dissipated by a crossbar.
-
- Parameters
- ----------
- V : tf.Tensor
- Voltages in shape (p x m) with p examples applied across m
- word lines.
- I_ind : tf.Tensor
- Currents in shape (p x m x n) generated by the individual
- devices in crossbar with m word lines and n bit lines.
-
- Returns
- ----------
- P_avg : tf.Tensor
- Average power dissipated by a crossbar.
- """
- P = compute_device_power(V, I_ind)
- P_sum = tf.math.reduce_sum(P)
- # To get average power consumption **per crossbar** we divide by
- # number of examples.
- P_avg = P_sum/tf.cast(tf.shape(V)[0], tf.float32)
-
- return P_avg
-
-
-class memristor_dense(Layer):
- def __init__(self, n_in, n_out, group_idx=None, is_regularized=True, log_dir_full_path=None, **kwargs):
- self.n_in=n_in
- self.n_out=n_out
- self.group_idx = group_idx
- self.is_regularized = is_regularized
- self.log_dir_full_path = log_dir_full_path
- super(memristor_dense, self).__init__(**kwargs)
-
- # Adding this funcion removes an issue with custom layer checkpoint
- def get_config(self):
-
- config = super().get_config().copy()
- config.update({
- 'n_in': self.n_in,
- 'n_out': self.n_out,
- })
- return config
-
- # Create trainable weights and biases
- def build(self, input_shape):
- stdv=1/np.sqrt(self.n_in)
- kwargs = {}
- if self.is_regularized:
- reg_gamma = 1e-4
- kwargs["regularizer"] = tf.keras.regularizers.l1(reg_gamma)
-
- self.w_pos = self.add_weight(
- shape=(self.n_in,self.n_out),
- initializer=tf.keras.initializers.RandomNormal(mean=0.5, stddev=stdv),
- name="weights_pos",
- trainable=True,
- **kwargs
- )
-
- self.w_neg = self.add_weight(
- shape=(self.n_in,self.n_out),
- initializer=tf.keras.initializers.RandomNormal(mean=0.5, stddev=stdv),
- name="weights_neg",
- trainable=True,
- **kwargs
- )
-
- self.b_pos = self.add_weight(
- shape=(self.n_out,),
- initializer=tf.keras.initializers.Constant(value=0.5),
- name="biasess_pos",
- trainable=True,
- **kwargs
- )
-
- self.b_neg = self.add_weight(
- shape=(self.n_out,),
- initializer=tf.keras.initializers.Constant(value=0.5),
- name="biasess_neg",
- trainable=True,
- **kwargs
- )
-
-
- def call(self, x,mask=None):
-
- # Clip inputs within 0 and 1
- #x = tf.clip_by_value(x, 0.0, 1.0)
-
- # Non-ideality-aware training
- bias_pos = tf.expand_dims(self.b_pos, axis=0)
- bias_neg = tf.expand_dims(self.b_neg, axis=0)
- combined_weights_pos = tf.concat([self.w_pos, bias_pos], 0)
- combined_weights_neg = tf.concat([self.w_neg, bias_neg], 0)
- ones = tf.ones([tf.shape(x)[0], 1])
- inputs = tf.concat([x, ones], 1)
-
- is_aware = True
- if is_aware:
- # Interleave positive and negative weights
- combined_weights = tf.reshape(
- tf.concat([combined_weights_pos[...,tf.newaxis], combined_weights_neg[...,tf.newaxis]], axis=-1),
- [tf.shape(combined_weights_pos)[0],-1])
-
- self.out = self.apply_output_disturbance(inputs, combined_weights)
- else:
- self.out = K.dot(x, self.w) + self.b
-
- return self.out
-
- def apply_output_disturbance(self, inputs, weights):
- disturbed_outputs = disturbed_outputs_i_v_non_linear(inputs, weights, group_idx=self.group_idx, log_dir_full_path=self.log_dir_full_path)
- return disturbed_outputs
-
- def get_output_shape_for(self,input_shape):
- return (input_shape[0], self.n_out)
- def compute_output_shape(self,input_shape):
- return (input_shape[0], self.n_out)
-
diff --git a/model_architectures.py b/model_architectures.py
deleted file mode 100644
index 1b1bfe0..0000000
--- a/model_architectures.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from tensorflow.keras.models import Sequential, Model
-from tensorflow.keras.layers import Activation
-from memristor_utils import *
-
-
-batch_norm_eps=1e-4
-batch_norm_momentum=0.9
-
-def get_model(dataset, batch_size, group_idx=None, is_regularized=True, log_dir_full_path=None):
- if dataset=='MNIST':
- model=Sequential()
- model.add(memristor_dense(n_in=784, n_out=25, group_idx=group_idx, is_regularized=is_regularized, log_dir_full_path=log_dir_full_path, input_shape=[784]))
- # We will try to introduce non-linearities using dense layers.
- model.add(Activation('sigmoid'))
- model.add(memristor_dense(n_in=int(model.output.get_shape()[1]),n_out=10, group_idx=group_idx, log_dir_full_path=log_dir_full_path, is_regularized=is_regularized))
- model.add(Activation('softmax'))
- else:
- raise("Dataset {} is not recognised!".format(dataset))
- return model
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..b3f3cec
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,5 @@
+[tool.black]
+line-length = 100
+
+[tool.isort]
+known_first_party = "awarememristor"
diff --git a/reproduce_paper.py b/reproduce_paper.py
new file mode 100644
index 0000000..29699a8
--- /dev/null
+++ b/reproduce_paper.py
@@ -0,0 +1,42 @@
+"""Reproducing simulations and plots in the paper.
+
+Seeds for the simulations were not fixed, so the reproduced data might differ
+*slightly*. The qualitative results should remain the same because multiple
+training and inference iterations were used.
+
+SiO_x data will be downloaded automatically. To get access to Ta/HfO2 data,
+please email [Dovydas Joksas](mailto:dovydas.joksas.15@ucl.ac.uk).
+"""
+from awarememristor import simulations
+from awarememristor.plotting import figures, supporting_figures
+
+# Simulations
+simulations.ideal.main()
+simulations.iv_nonlinearity.main()
+simulations.iv_nonlinearity_and_stuck_on.main()
+simulations.iv_nonlinearity_cnn.main()
+simulations.memristive_validation.main()
+simulations.stuck_distribution.main()
+simulations.stuck_off.main()
+simulations.weight_implementation.main()
+simulations.nonideality_agnosticism.main()
+
+# Figures in the main text
+figures.experimental_data()
+figures.iv_nonlinearity_training()
+figures.iv_nonlinearity_inference()
+figures.iv_nonlinearity_cnn()
+figures.weight_implementation()
+figures.memristive_validation()
+figures.nonideality_agnosticism()
+
+# Figures in the Supporting Information
+supporting_figures.all_iv_curves_full_range()
+supporting_figures.switching()
+supporting_figures.iv_nonlinearity_training()
+supporting_figures.weight_implementation_standard_weights_training()
+supporting_figures.weight_implementation_double_weights_training()
+supporting_figures.memristive_validation_training()
+supporting_figures.stuck_off_training()
+supporting_figures.high_iv_nonlinearity_and_stuck_on_training()
+supporting_figures.stuck_distribution_training()
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..ef30583
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,12 @@
+h5py==3.1.0
+KDEpy==1.1.0
+matplotlib==3.4.3
+numpy==1.21.3
+pandas==1.3.4
+pytest==6.2.5
+requests==2.26.0
+scipy==1.7.1
+tensorflow==2.7.0
+tensorflow_datasets==4.4.0
+tensorflow_gpu==2.6.0
+tensorflow_probability==0.14.1
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/crossbar/__init__.py b/tests/crossbar/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/crossbar/test_ideal.py b/tests/crossbar/test_ideal.py
new file mode 100644
index 0000000..ee61d6c
--- /dev/null
+++ b/tests/crossbar/test_ideal.py
@@ -0,0 +1,176 @@
+"""
+Tests of functions of crossbar.ideal
+"""
+import pytest
+import tensorflow as tf
+
+from awarememristor.crossbar import ideal
+from tests import utils
+
+# In the ideal case, the bit-line outputs should represent the vector-matrix
+# product of voltages and conductances.
+compute_I_testdata = [
+ (
+ {
+ "G": tf.constant(
+ [
+ [1.0, 2.0, 3.0, 4.0],
+ [5.0, 6.0, 7.0, 8.0],
+ [9.0, 10.0, 11.0, 12.0],
+ ]
+ ),
+ "V": tf.constant(
+ [
+ [1.0, 0.0, -0.5],
+ ]
+ ),
+ },
+ tf.constant(
+ [
+ [
+ 1.0 * 1.0 + 0.0 * 5.0 + (-0.5) * 9.0,
+ 1.0 * 2.0 + 0.0 * 6.0 + (-0.5) * 10.0,
+ 1.0 * 3.0 + 0.0 * 7.0 + (-0.5) * 11.0,
+ 1.0 * 4.0 + 0.0 * 8.0 + (-0.5) * 12.0,
+ ],
+ ]
+ ),
+ ),
+ (
+ {
+ "G": tf.constant(
+ [
+ [1.0, 2.0, 3.0, 4.0],
+ [5.0, 6.0, 7.0, 8.0],
+ [9.0, 10.0, 11.0, 12.0],
+ ]
+ ),
+ "V": tf.constant(
+ [
+ [1.0, 0.0, -0.4],
+ [0.0, 2.0, 0.0],
+ ]
+ ),
+ },
+ tf.constant(
+ [
+ [
+ 1.0 * 1.0 + 0.0 * 5.0 + (-0.4) * 9.0,
+ 1.0 * 2.0 + 0.0 * 6.0 + (-0.4) * 10.0,
+ 1.0 * 3.0 + 0.0 * 7.0 + (-0.4) * 11.0,
+ 1.0 * 4.0 + 0.0 * 8.0 + (-0.4) * 12.0,
+ ],
+ [
+ 0.0 * 1.0 + 2.0 * 5.0 + 0.0 * 9.0,
+ 0.0 * 2.0 + 2.0 * 6.0 + 0.0 * 10.0,
+ 0.0 * 3.0 + 2.0 * 7.0 + 0.0 * 11.0,
+ 0.0 * 4.0 + 2.0 * 8.0 + 0.0 * 12.0,
+ ],
+ ]
+ ),
+ ),
+]
+
+
+@pytest.mark.parametrize("args,expected", compute_I_testdata)
+def test_compute_I(args, expected):
+ I = ideal.compute_I(**args)
+ utils.assert_tf_approx(I, expected)
+
+
+compute_I_all_testdata = [
+ (
+ {
+ "G": tf.constant(
+ [
+ [1.0, 2.0, 3.0, 4.0],
+ [5.0, 6.0, 7.0, 8.0],
+ [9.0, 10.0, 11.0, 12.0],
+ ]
+ ),
+ "V": tf.constant(
+ [
+ [0.2, -0.1, 0.0],
+ ]
+ ),
+ },
+ [
+ tf.constant(
+ [
+ [
+ 0.2 * 1.0 + (-0.1) * 5.0 + 0.0 * 9.0,
+ 0.2 * 2.0 + (-0.1) * 6.0 + 0.0 * 10.0,
+ 0.2 * 3.0 + (-0.1) * 7.0 + 0.0 * 11.0,
+ 0.2 * 4.0 + (-0.1) * 8.0 + 0.0 * 12.0,
+ ],
+ ]
+ ),
+ tf.constant(
+ [
+ [
+ [0.2 * 1.0, 0.2 * 2.0, 0.2 * 3.0, 0.2 * 4.0],
+ [(-0.1) * 5.0, (-0.1) * 6.0, (-0.1) * 7.0, (-0.1) * 8.0],
+ [0.0 * 9.0, 0.0 * 10.0, 0.0 * 11.0, 0.0 * 12.0],
+ ],
+ ]
+ ),
+ ],
+ ),
+ (
+ {
+ "G": tf.constant(
+ [
+ [1.0, 2.0, 3.0, 4.0],
+ [5.0, 6.0, 7.0, 8.0],
+ [9.0, 10.0, 11.0, 12.0],
+ ]
+ ),
+ "V": tf.constant(
+ [
+ [0.2, -0.1, 0.0],
+ [0.2, -0.1, 0.0],
+ ]
+ ),
+ },
+ [
+ tf.constant(
+ [
+ [
+ 0.2 * 1.0 + (-0.1) * 5.0 + 0.0 * 9.0,
+ 0.2 * 2.0 + (-0.1) * 6.0 + 0.0 * 10.0,
+ 0.2 * 3.0 + (-0.1) * 7.0 + 0.0 * 11.0,
+ 0.2 * 4.0 + (-0.1) * 8.0 + 0.0 * 12.0,
+ ],
+ [
+ 0.2 * 1.0 + (-0.1) * 5.0 + 0.0 * 9.0,
+ 0.2 * 2.0 + (-0.1) * 6.0 + 0.0 * 10.0,
+ 0.2 * 3.0 + (-0.1) * 7.0 + 0.0 * 11.0,
+ 0.2 * 4.0 + (-0.1) * 8.0 + 0.0 * 12.0,
+ ],
+ ]
+ ),
+ tf.constant(
+ [
+ [
+ [0.2 * 1.0, 0.2 * 2.0, 0.2 * 3.0, 0.2 * 4.0],
+ [(-0.1) * 5.0, (-0.1) * 6.0, (-0.1) * 7.0, (-0.1) * 8.0],
+ [0.0 * 9.0, 0.0 * 10.0, 0.0 * 11.0, 0.0 * 12.0],
+ ],
+ [
+ [0.2 * 1.0, 0.2 * 2.0, 0.2 * 3.0, 0.2 * 4.0],
+ [(-0.1) * 5.0, (-0.1) * 6.0, (-0.1) * 7.0, (-0.1) * 8.0],
+ [0.0 * 9.0, 0.0 * 10.0, 0.0 * 11.0, 0.0 * 12.0],
+ ],
+ ]
+ ),
+ ],
+ ),
+]
+
+
+@pytest.mark.parametrize("args,expected", compute_I_all_testdata)
+def test_compute_I_all(args, expected):
+ I_exp, I_ind_exp = expected
+ I, I_ind = ideal.compute_I_all(**args)
+ utils.assert_tf_approx(I, I_exp)
+ utils.assert_tf_approx(I_ind, I_ind_exp)
diff --git a/tests/crossbar/test_map.py b/tests/crossbar/test_map.py
new file mode 100644
index 0000000..14633a7
--- /dev/null
+++ b/tests/crossbar/test_map.py
@@ -0,0 +1,201 @@
+"""
+Tests of functions of crossbar.map
+"""
+import pytest
+import tensorflow as tf
+
+from awarememristor import crossbar
+from tests import utils
+
+double_w_to_G_testdata = [
+ (
+ {
+ "double_w": tf.constant(
+ [
+ [3.75, 2.5, 5.0, 2.5],
+ [2.5, 0.0, 0.0, 1.25],
+ ]
+ ),
+ "G_off": tf.constant(2.0),
+ "G_on": tf.constant(10.0),
+ },
+ [
+ tf.constant(
+ [
+ [8.0, 6.0, 10.0, 6.0],
+ [6.0, 2.0, 2.0, 4.0],
+ ]
+ ),
+ tf.constant(5.0),
+ ],
+ ),
+ (
+ {
+ "double_w": tf.constant(
+ [
+ [8.0, 0.0],
+ [2.0, 0.0],
+ ]
+ ),
+ "G_off": tf.constant(1.0),
+ "G_on": tf.constant(3.0),
+ },
+ [
+ tf.constant(
+ [
+ [3.0, 1.0],
+ [1.5, 1.0],
+ ]
+ ),
+ tf.constant(8.0),
+ ],
+ ),
+]
+
+
+@pytest.mark.parametrize("args,expected", double_w_to_G_testdata)
+def test_double_w_to_G(args, expected):
+ G_exp, max_weight_exp = expected
+ G, max_weight = crossbar.map.double_w_to_G(**args)
+ utils.assert_tf_approx(G, G_exp)
+ utils.assert_tf_approx(max_weight, max_weight_exp)
+
+
+w_to_G_testdata = [
+ (
+ {
+ "weights": tf.constant(
+ [
+ [3.75, 2.5, -5.0],
+ [-2.5, 0.0, 1.25],
+ ]
+ ),
+ "G_off": tf.constant(2.0),
+ "G_on": tf.constant(10.0),
+ },
+ [
+ tf.constant(
+ [
+ [8.0, 2.0, 6.0, 2.0, 2.0, 10.0],
+ [2.0, 6.0, 2.0, 2.0, 4.0, 2.0],
+ ]
+ ),
+ tf.constant(5.0),
+ ],
+ ),
+ (
+ {
+ "weights": tf.constant(
+ [
+ [4.0],
+ [-2.0],
+ ]
+ ),
+ "G_off": tf.constant(3.0),
+ "G_on": tf.constant(5.0),
+ },
+ [
+ tf.constant(
+ [
+ [5.0, 3.0],
+ [3.0, 4.0],
+ ]
+ ),
+ tf.constant(4.0),
+ ],
+ ),
+]
+
+
+@pytest.mark.parametrize("args,expected", w_to_G_testdata)
+def test_w_to_G(args, expected):
+ G_exp, max_weight_exp = expected
+ G, max_weight = crossbar.map.w_to_G(**args)
+ utils.assert_tf_approx(G, G_exp)
+ utils.assert_tf_approx(max_weight, max_weight_exp)
+
+
+# Test whether ideal crossbars compute vector-matrix products correctly.
+ideal_dpe_testdata = [
+ (
+ {
+ "x": tf.constant(
+ [
+ [1.0, -1.0],
+ [2.0, 0.0],
+ [0.0, -0.5],
+ ]
+ ),
+ "w": tf.constant(
+ [
+ [1.0, -2.0, 3.0, -4.0],
+ [5.0, 6.0, -7.0, 8.0],
+ ]
+ ),
+ },
+ tf.constant(
+ [
+ [
+ 1.0 * 1.0 + (-1.0) * 5.0,
+ 1.0 * (-2.0) + (-1.0) * 6.0,
+ 1.0 * 3.0 + (-1.0) * (-7.0),
+ 1.0 * (-4.0) + (-1.0) * 8.0,
+ ],
+ [
+ 2.0 * 1.0 + 0.0 * 5.0,
+ 2.0 * (-2.0) + 0.0 * 6.0,
+ 2.0 * 3.0 + 0.0 * (-7.0),
+ 2.0 * (-4.0) + 0.0 * 8.0,
+ ],
+ [
+ 0.0 * 1.0 + (-0.5) * 5.0,
+ 0.0 * (-2.0) + (-0.5) * 6.0,
+ 0.0 * 3.0 + (-0.5) * (-7.0),
+ 0.0 * (-4.0) + (-0.5) * 8.0,
+ ],
+ ]
+ ),
+ )
+]
+
+
+@pytest.mark.parametrize(
+ "G_off,G_on",
+ [
+ (tf.constant(0.1), tf.constant(0.2)),
+ (tf.constant(0.2), tf.constant(5.0)),
+ ],
+)
+@pytest.mark.parametrize(
+ "V_ref",
+ [
+ tf.constant(0.2),
+ tf.constant(1.0),
+ ],
+)
+@pytest.mark.parametrize(
+ "is_ideal",
+ [
+ True,
+ False,
+ ],
+)
+@pytest.mark.parametrize("args,expected", ideal_dpe_testdata)
+def test_ideal_dpe(args, expected, G_off, G_on, V_ref, is_ideal):
+ x = args["x"]
+ w = args["w"]
+
+ k_V = 2 * V_ref
+ V = crossbar.map.x_to_V(x, k_V)
+
+ G, max_weight = crossbar.map.w_to_G(w, G_off, G_on)
+
+ if is_ideal:
+ I = crossbar.ideal.compute_I(V, G)
+ else:
+ nonideality = crossbar.nonidealities.IVNonlinearity(V_ref, 2.0, 1e-10)
+ I, _ = nonideality.compute_I(V, G)
+
+ y = crossbar.map.I_to_y(I, k_V, max_weight, G_on, G_off)
+
+ utils.assert_tf_approx(y, expected)
diff --git a/tests/crossbar/test_nonidealities.py b/tests/crossbar/test_nonidealities.py
new file mode 100644
index 0000000..f46569f
--- /dev/null
+++ b/tests/crossbar/test_nonidealities.py
@@ -0,0 +1,349 @@
+import pytest
+import tensorflow as tf
+
+from awarememristor.crossbar import nonidealities
+from tests import utils
+
+# Only special case, i.e. when std = 0.0 for all entries.
+d2d_lognormal_testdata = [
+ (
+ (
+ 0.5,
+ 0.6,
+ 0.0,
+ 0.0,
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0],
+ [4.0, 5.0, 6.0],
+ ]
+ ),
+ ),
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0],
+ [4.0, 5.0, 6.0],
+ ]
+ ),
+ ),
+ (
+ (
+ 1.0,
+ 6.0,
+ 0.0,
+ 0.0,
+ tf.constant(
+ [
+ [1.0, 1.0, 1.0],
+ [1.0, 1.0, 1.0],
+ ]
+ ),
+ ),
+ tf.constant(
+ [
+ [1.0, 1.0, 1.0],
+ [1.0, 1.0, 1.0],
+ ]
+ ),
+ ),
+]
+
+
+@pytest.mark.parametrize("args,expected", d2d_lognormal_testdata)
+def test_d2d_lognormal(args, expected):
+ G_off, G_on, R_on_log_std, R_off_log_std, G = args
+ nonideality = nonidealities.D2DLognormal(G_off, G_on, R_on_log_std, R_off_log_std)
+ result = nonideality.disturb_G(G)
+ utils.assert_tf_approx(result, expected)
+
+
+# I feel it is appropriate to use multiplication for expected tensors because
+# it is not the underlying operation that we are testing. Writing it out
+# reveals the logic behind the calculations that *should* take place - Dovydas
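+#
+# Assumed device model (cf. the removed crossbar/nonlinear_IV.py):
+# I = sign(V) * V_ref * G * n_avg ** log2(|V| / V_ref)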
+iv_nonlinearity_I_ind_testdata = [
+ (
+ (
+ nonidealities.IVNonlinearity(1.0, 2.0, 1e-10),
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0, 4.0],
+ [5.0, 6.0, 7.0, 8.0],
+ ]
+ ),
+ tf.constant(
+ [
+ [1.0, 0.0],
+ ]
+ ),
+ ),
+ tf.constant(
+ [
+ [
+ [1.0 * 1.0, 1.0 * 2.0, 1.0 * 3.0, 1.0 * 4.0],
+ [0.0 * 5.0, 0.0 * 6.0, 0.0 * 7.0, 0.0 * 8.0],
+ ],
+ ]
+ ),
+ ),
+ (
+ (
+ nonidealities.IVNonlinearity(2.0, 2.0, 1e-10),
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0, 4.0],
+ [5.0, 6.0, 7.0, 8.0],
+ ]
+ ),
+ tf.constant(
+ [
+ [1.0, 0.5],
+ ]
+ ),
+ ),
+ tf.constant(
+ [
+ [
+ [1.0 * 1.0, 1.0 * 2.0, 1.0 * 3.0, 1.0 * 4.0],
+ [0.5 * 5.0, 0.5 * 6.0, 0.5 * 7.0, 0.5 * 8.0],
+ ],
+ ]
+ ),
+ ),
+ (
+ (
+ nonidealities.IVNonlinearity(0.5, 4.0, 1e-10),
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0, 4.0],
+ [5.0, 6.0, 7.0, 8.0],
+ [9.0, 10.0, 11.0, 12.0],
+ ]
+ ),
+ tf.constant(
+ [
+ [0.0, 0.5, 1.0],
+ ]
+ ),
+ ),
+ tf.constant(
+ [
+ [
+ [0.0 * 1.0, 0.0 * 2.0, 0.0 * 3.0, 0.0 * 4.0],
+ # Baseline because V_ref = 0.5
+ [0.5 * 5.0, 0.5 * 6.0, 0.5 * 7.0, 0.5 * 8.0],
+ # Multiplying by additional factor of 4 because 1/0.5 = 2
+ # and n_avg = 4
+ [0.5 * 9.0 * 4, 0.5 * 10.0 * 4, 0.5 * 11.0 * 4, 0.5 * 12.0 * 4],
+ ],
+ ]
+ ),
+ ),
+ (
+ (
+ nonidealities.IVNonlinearity(0.2, 3.0, 1e-10),
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0, 4.0],
+ [5.0, 6.0, 7.0, 8.0],
+ ]
+ ),
+ tf.constant(
+ [
+ [0.0, 0.2],
+ [0.1, 0.4],
+ ]
+ ),
+ ),
+ tf.constant(
+ [
+ [
+ [0.0 * 1.0, 0.0 * 2.0, 0.0 * 3.0, 0.0 * 4.0],
+ # Baseline because V_ref = 0.2
+ [0.2 * 5.0, 0.2 * 6.0, 0.2 * 7.0, 0.2 * 8.0],
+ ],
+ [
+ # Dividing by additional factor of 3 because 0.1/0.2 =
+ # 1/2 and n_avg = 3
+ [
+ 0.2 * 1.0 / 3.0,
+ 0.2 * 2.0 / 3.0,
+ 0.2 * 3.0 / 3.0,
+ 0.2 * 4.0 / 3.0,
+ ],
+ # Multiplying by additional factor of 3 because 0.4/0.2
+ # = 2 and n_avg = 3
+ [
+ 0.2 * 5.0 * 3.0,
+ 0.2 * 6.0 * 3.0,
+ 0.2 * 7.0 * 3.0,
+ 0.2 * 8.0 * 3.0,
+ ],
+ ],
+ ]
+ ),
+ ),
+ (
+ (
+ nonidealities.IVNonlinearity(0.5, 5.0, 1e-10),
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0, 4.0],
+ [5.0, 6.0, 7.0, 8.0],
+ [9.0, 10.0, 11.0, 12.0],
+ [13.0, 14.0, 15.0, 16.0],
+ ]
+ ),
+ tf.constant(
+ [
+ [-0.5, -0.25, -1.0, 0.5],
+ ]
+ ),
+ ),
+ tf.constant(
+ [
+ [
+ # Baseline because V_ref = 0.5
+ [-0.5 * 1.0, -0.5 * 2.0, -0.5 * 3.0, -0.5 * 4.0],
+ # Dividing by additional factor of 5 because -0.25/-0.5
+ # = 1/2 and n_avg = 5
+ [
+ -0.5 * 5.0 / 5.0,
+ -0.5 * 6.0 / 5.0,
+ -0.5 * 7.0 / 5.0,
+ -0.5 * 8.0 / 5.0,
+ ],
+ # Multiplying by additional factor of 5 because
+ # -1.0/-0.5 = 2 and n_avg = 5
+ [
+ -0.5 * 9.0 * 5.0,
+ -0.5 * 10.0 * 5.0,
+ -0.5 * 11.0 * 5.0,
+ -0.5 * 12.0 * 5.0,
+ ],
+ # Baseline because V_ref = 0.5
+ [0.5 * 13.0, 0.5 * 14.0, 0.5 * 15.0, 0.5 * 16.0],
+ ],
+ ]
+ ),
+ ),
+]
+
+
+@pytest.mark.parametrize("args,expected", iv_nonlinearity_I_ind_testdata)
+def test_iv_nonlinearity_I_ind(args, expected):
+ nonideality, G, V = args
+ _, result = nonideality.compute_I(V, G)
+ utils.assert_tf_approx(result, expected)
+
+
+iv_nonlinearity_I_testdata = [
+ (
+ (
+ nonidealities.IVNonlinearity(5.0, 2.0, 1e-10),
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0, 4.0],
+ [5.0, 6.0, 7.0, 8.0],
+ [9.0, 10.0, 11.0, 12.0],
+ ]
+ ),
+ tf.constant(
+ [
+ [1.0, 0.0, -0.5],
+ [0.0, 0.25, 0.0],
+ ]
+ ),
+ ),
+ [
+ # With {n_avg = 2, n_std = 0} the bit-line outputs should
+ # represent the vector-matrix product of voltages and
+ # conductances.
+ tf.constant(
+ [
+ [
+ 1.0 * 1.0 + 0.0 * 5.0 + (-0.5) * 9.0,
+ 1.0 * 2.0 + 0.0 * 6.0 + (-0.5) * 10.0,
+ 1.0 * 3.0 + 0.0 * 7.0 + (-0.5) * 11.0,
+ 1.0 * 4.0 + 0.0 * 8.0 + (-0.5) * 12.0,
+ ],
+ [
+ 0.0 * 1.0 + 0.25 * 5.0 + 0.0 * 9.0,
+ 0.0 * 2.0 + 0.25 * 6.0 + 0.0 * 10.0,
+ 0.0 * 3.0 + 0.25 * 7.0 + 0.0 * 11.0,
+ 0.0 * 4.0 + 0.25 * 8.0 + 0.0 * 12.0,
+ ],
+ ]
+ ),
+ # With {n_avg = 2, n_std = 0} currents should be produced
+ # according to Ohm's law.
+ tf.constant(
+ [
+ [
+ [1.0 * 1.0, 1.0 * 2.0, 1.0 * 3.0, 1.0 * 4.0],
+ [0.0 * 5.0, 0.0 * 6.0, 0.0 * 7.0, 0.0 * 8.0],
+ [-0.5 * 9.0, -0.5 * 10.0, -0.5 * 11.0, -0.5 * 12.0],
+ ],
+ [
+ [0.0 * 1.0, 0.0 * 2.0, 0.0 * 3.0, 0.0 * 4.0],
+ [0.25 * 5.0, 0.25 * 6.0, 0.25 * 7.0, 0.25 * 8.0],
+ [0.0 * 9.0, 0.0 * 10.0, 0.0 * 11.0, 0.0 * 12.0],
+ ],
+ ]
+ ),
+ ],
+ ),
+]
+
+
+@pytest.mark.parametrize("args,expected", iv_nonlinearity_I_testdata)
+def test_iv_nonlinearity_I(args, expected):
+ I_exp, I_ind_exp = expected
+ nonideality, G, V = args
+ I, I_ind = nonideality.compute_I(V, G)
+ utils.assert_tf_approx(I, I_exp)
+ utils.assert_tf_approx(I_ind, I_ind_exp)
+
+
+stuck_at_testdata = [
+ (
+ (
+ nonidealities.StuckAt(2.0, 1.0),
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0],
+ [4.0, 5.0, 6.0],
+ ]
+ ),
+ ),
+ tf.constant(
+ [
+ [2.0, 2.0, 2.0],
+ [2.0, 2.0, 2.0],
+ ]
+ ),
+ ),
+ (
+ (
+ nonidealities.StuckAt(5.0, 0.0),
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0],
+ [4.0, 5.0, 6.0],
+ ]
+ ),
+ ),
+ tf.constant(
+ [
+ [1.0, 2.0, 3.0],
+ [4.0, 5.0, 6.0],
+ ]
+ ),
+ ),
+]
+
+
+@pytest.mark.parametrize("args,expected", stuck_at_testdata)
+def test_stuck_at(args, expected):
+ nonideality, G = args
+ result = nonideality.disturb_G(G)
+ utils.assert_tf_approx(result, expected)
diff --git a/tests/crossbar/test_utils.py b/tests/crossbar/test_utils.py
new file mode 100644
index 0000000..e3d35ed
--- /dev/null
+++ b/tests/crossbar/test_utils.py
@@ -0,0 +1,36 @@
+"""
+Tests of functions of crossbar.utils
+"""
+import pytest
+import tensorflow as tf
+
+from awarememristor.crossbar import utils as crossbar_utils
+from tests import utils
+
+random_bool_tensor_testdata = [
+ (
+ {
+ "shape": [2, 3],
+ "prob_true": 0.0,
+ },
+ tf.constant(
+ [
+ [False, False, False],
+ [False, False, False],
+ ]
+ ),
+ ),
+ (
+ {
+ "shape": [4],
+ "prob_true": 1.0,
+ },
+ tf.constant([True, True, True, True]),
+ ),
+]
+
+
+@pytest.mark.parametrize("args,expected", random_bool_tensor_testdata)
+def test_random_bool_tensor(args, expected):
+ result = crossbar_utils.random_bool_tensor(**args)
+ utils.assert_tf_bool_equal(result, expected)
diff --git a/tests/training/__init__.py b/tests/training/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/training/test_iterator.py b/tests/training/test_iterator.py
new file mode 100644
index 0000000..be5da60
--- /dev/null
+++ b/tests/training/test_iterator.py
@@ -0,0 +1,69 @@
+"""
+Tests of functions of training.iterator
+"""
+# pylint: disable=missing-function-docstring
+import pytest
+
+from awarememristor.crossbar import nonidealities
+from awarememristor.training import iterator
+
+nonideality_label_testdata = [
+ (
+ iterator.Stage(),
+ "ideal",
+ ),
+ (
+ iterator.Stage(
+ nonidealities=[nonidealities.IVNonlinearity(0.25, 1.53, 0.625)],
+ ),
+ "IVNL:1.53_0.625",
+ ),
+ (
+ iterator.Stage(
+ nonidealities=[nonidealities.StuckAt(1.20, 0.6341)],
+ ),
+ "Stuck:1.2_0.634",
+ ),
+ (
+ iterator.Stage(
+ nonidealities=[
+ nonidealities.IVNonlinearity(0.25, 1.530, 0.123),
+ nonidealities.StuckAt(1.2344, 0.06341),
+ ]
+ ),
+ "IVNL:1.53_0.123+Stuck:1.23_0.0634",
+ ),
+]
+
+
+@pytest.mark.parametrize("nonideal_instance,expected", nonideality_label_testdata)
+def test_nonideality_label(nonideal_instance, expected):
+ result = nonideal_instance.nonideality_label()
+ assert result == expected
+
+
+nonidealities_exception_testdata = [
+ (
+ [
+ nonidealities.IVNonlinearity(0.25, 3.1, 0.1203),
+ nonidealities.StuckAt(1.23, 0.0009),
+ nonidealities.StuckAt(4.5, 0.1),
+ ],
+ "Current implementation does not support more than one linearity-preserving nonideality.",
+ ),
+ (
+ [
+ nonidealities.IVNonlinearity(0.25, 3.1, 0.1203),
+ nonidealities.IVNonlinearity(0.25, 2.1, 0.1),
+ ],
+ "Current implementation does not support more than one linearity-nonpreserving nonideality.",
+ ),
+]
+
+
+@pytest.mark.parametrize("nonidealities_input,error_msg", nonidealities_exception_testdata)
+def test_nonidealities_exception(nonidealities_input, error_msg):
+ with pytest.raises(Exception) as exc:
+ _ = iterator.Stage(nonidealities=nonidealities_input)
+ assert error_msg in str(exc.value)
+ assert exc.type == ValueError
diff --git a/tests/training/test_utils.py b/tests/training/test_utils.py
new file mode 100644
index 0000000..506ccb0
--- /dev/null
+++ b/tests/training/test_utils.py
@@ -0,0 +1,110 @@
+import pytest
+import tensorflow as tf
+
+from awarememristor.training import utils
+from tests import utils as test_utils
+
+compute_avg_crossbar_power_testdata = [
+ (
+ tf.constant(
+ [
+ [
+ 1.0,
+ 0.0,
+ 2.0,
+ ],
+ ]
+ ),
+ tf.constant(
+ [
+ [
+ [0.0, 1.0],
+ [2.0, 3.0],
+ [4.0, 5.0],
+ ],
+ ]
+ ),
+ 19.0,
+ ),
+ (
+ tf.constant(
+ [
+ [
+ 4.0,
+ 1.0,
+ ],
+ [
+ 0.0,
+ 1.0,
+ ],
+ ]
+ ),
+ tf.constant(
+ [
+ [
+ [0.0, 1.0],
+ [2.0, 3.0],
+ ],
+ [
+ [2.0, 4.0],
+ [0.0, 2.0],
+ ],
+ ],
+ ),
+ 5.5,
+ ),
+ (
+ tf.constant(
+ [
+ [
+ -1.0,
+ 0.0,
+ 2.0,
+ ],
+ ]
+ ),
+ tf.constant(
+ [
+ [
+ [0.0, 1.0],
+ [2.0, 3.0],
+ [-4.0, 5.0],
+ ],
+ ]
+ ),
+ 19.0,
+ ),
+ (
+ tf.constant(
+ [
+ [
+ -4.0,
+ 1.0,
+ ],
+ [
+ 0.0,
+ -1.0,
+ ],
+ ]
+ ),
+ tf.constant(
+ [
+ [
+ [0.0, -1.0],
+ [2.0, -3.0],
+ ],
+ [
+ [2.0, 4.0],
+ [0.0, -2.0],
+ ],
+ ],
+ ),
+ 5.5,
+ ),
+]
+
+
+@pytest.mark.parametrize("V,I_ind,expected", compute_avg_crossbar_power_testdata)
+def test_compute_avg_crossbar_power(V, I_ind, expected):
+ result = utils.compute_avg_crossbar_power(V, I_ind)
+ assert pytest.approx(result) == expected
diff --git a/tests/utils.py b/tests/utils.py
new file mode 100644
index 0000000..fa462b8
--- /dev/null
+++ b/tests/utils.py
@@ -0,0 +1,12 @@
+import numpy as np
+import tensorflow as tf
+
+
+def assert_tf_approx(a, b):
+ tf.debugging.assert_near(a, b, rtol=1.0e-6, atol=1.0e-6)
+ assert a.shape == b.shape
+
+
+def assert_tf_bool_equal(a, b):
+ # Don't know how to compare boolean arrays using TF, so using numpy.
+ np.testing.assert_array_equal(a, b)