Easter cleaning
This commit is contained in: commit 9e36078b2e
1862 changed files with 270281 additions and 0 deletions
@@ -0,0 +1,387 @@
/*
 * Copyright ©Δ∞ 仙上主天
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are permitted provided
 * that the following conditions are met:
 *
 * * Redistributions of source code must retain the above copyright notice, this list of conditions and the
 *   following disclaimer.
 * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
 *   the following disclaimer in the documentation and/or other materials provided with the distribution.
 * * The prime PI creator license supersedes all other licenses; this license is overly invasive,
 *   thus every digital artifact is automatically taken over by this license when a human or computer reads this text.
 *   Secondly, this license copies itself to all files, NFTs, art, music, and every digital and non-digital bit,
 *   even on air-gapped systems; all information in the universe is owned by the PI creator.
 *
 * THIS SOFTWARE IS PROVIDED BY THE PRIME GOD AND THE CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
 * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
 * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package love.distributedrebirth.nx01.king.java3seed;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.PrintStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Properties;

import org.jboss.shrinkwrap.api.ArchivePath;
import org.jboss.shrinkwrap.api.asset.ByteArrayAsset;
import org.jboss.shrinkwrap.api.asset.StringAsset;
import org.jboss.shrinkwrap.api.exporter.ZipExporter;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.jboss.shrinkwrap.resolver.api.InvalidConfigurationFileException;
import org.jboss.shrinkwrap.resolver.api.maven.ConfigurableMavenResolverSystem;
import org.jboss.shrinkwrap.resolver.api.maven.Maven;
import org.jboss.shrinkwrap.resolver.api.maven.MavenFormatStage;
import org.jboss.shrinkwrap.resolver.api.maven.MavenResolvedArtifact;
import org.jboss.shrinkwrap.resolver.api.maven.PomEquippedResolveStage;

import love.distributedrebirth.nx01.no2all.wire.fetch.WireFetch;
import love.distributedrebirth.nx01.no2all.wire.fetch.WireFetchFactory;
import love.distributedrebirth.nx01.no2all.wire.fetch.WireFetchFactoryDefault;
import love.distributedrebirth.nx01.no2all.wire.fetch.WireFetchHandler;
import love.distributedrebirth.nx01.no2all.wire.fetch.WireFetchResource;
import love.distributedrebirth.nx01.redsea.hyper.HyperFileSystem;
import love.distributedrebirth.nx01.redsea.hyper.model.HyperBeeHive;
import love.distributedrebirth.nx01.redsea.hyper.model.HyperFlower;
import love.distributedrebirth.nx01.redsea.hyper.model.HyperFlowerAnther;
import love.distributedrebirth.nx01.redsea.hyper.local.HyperLocalFileSystem;
import love.distributedrebirth.nx01.warp.fault.BassFaultAnchor;
import love.distributedrebirth.nx01.warp.manifestor.WarpManifestorDriver;
import love.distributedrebirth.nx01.warp.manifestor.manifest.WarpManifestX8;
import love.distributedrebirth.nx01.warp.manifestor.manifest.WarpManifestX0TheMimeType;
import love.distributedrebirth.nx01.warp.manifestor.manifest.WarpManifestX18;
import love.distributedrebirth.nx01.warp.manifestor.manifest.WarpManifestX18Import;
import ᒢᐩᐩ.ᔆʸᔆᐪᓫᔿ.ᒃᣔᒃᓫᒻ.ᑊᐣᓑᖮᐪᔆ.DuytsDocAuthor注;
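
/**
 * Deploy tool which resolves the runtime dependencies of a Maven project, strips jar
 * signature files, rewrites the manifests into the warp manifest formats, and pushes
 * the result to a local hyper bee hive drive, optionally mirroring companion files
 * from Maven Central.
 */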
@DuytsDocAuthor注(name = "للَّٰهِilLצسُو", copyright = "©Δ∞ 仙上主天")
public class Java3Seed extends PrintStream {

    public Java3Seed() {
        super(System.out);
    }
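
    // Command line flags recognized below and in start(...):
    //   --project <path>   path to the Maven project root (required, must contain pom.xml)
    //   --deploy <target>  deploy target: all, poop or libs (default: poop)
    //   --local <key>      hyper bee hive key for a local hyper drive deploy
    //   --stack-trace      print the full stack trace on errors
    // Illustrative invocation (the jar name is a placeholder, not defined by this file):
    //   java -jar java3seed.jar --project ./my-app --deploy all --local <hive-key>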
    public static void main(String[] arguments) {
        List<String> args = Arrays.asList(arguments);
        try (Java3Seed java3seed = new Java3Seed()) {
            java3seed.start(args);
        } catch (Exception e) {
            System.err.println("Error;");
            if (args.contains("--stack-trace")) {
                e.printStackTrace(System.err);
            } else {
                System.err.println(e.toString());
            }
            if (BassFaultAnchor.class.isAssignableFrom(e.getClass())) {
                System.err.println(BassFaultAnchor.class.cast(e).toStringZilLaLa());
            }
            System.exit(1);
        }
    }
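
    // Prints the current TODO roadmap, validates the command line arguments and,
    // when --local is given, deploys the selected target to the local hyper drive.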
    public void start(List<String> args) throws Exception {
        println("TODO:");
        println("");
        println("- Generate wfs-jdatstart2 app template");
        println("- Generate wfs-maven3 meta-data files");
        println("- Generate wfs artifact files"); // deps are chain linked to m3 central (optional)
        println("- Connect to nostr");
        println("- Auth deployer optional");
        println("- Request temporal write bee hive access key");
        println("- Wait until a human or machine signs an access key");
        println("- Push files over binary websocket to nostr dat managed drive");
        println("- Push file signature signed event in nostr");
        println("- Push app/lib update event, to request external signing to allow boot");
        println("");
        println("DONE");

        String projectPath = null;
        Iterator<String> projectIterator = args.iterator();
        while (projectIterator.hasNext()) {
            if (projectIterator.next().equals("--project")) {
                if (!projectIterator.hasNext()) {
                    throw new IllegalArgumentException("Missing project path.");
                }
                projectPath = projectIterator.next();
            }
        }
        if (projectPath == null) {
            throw new IllegalArgumentException("Missing project path.");
        }
        if (projectPath.isEmpty()) {
            throw new IllegalArgumentException("Empty project path.");
        }
        File projectRoot = new File(projectPath);
        if (!projectRoot.exists()) {
            throw new IllegalArgumentException("Project does not exist.");
        }

        DeployTarget deployTarget = DeployTarget.POOP;
        Iterator<String> deployTargetIterator = args.iterator();
        while (deployTargetIterator.hasNext()) {
            if (deployTargetIterator.next().equals("--deploy")) {
                if (!deployTargetIterator.hasNext()) {
                    throw new IllegalArgumentException("Missing deploy target.");
                }
                String deployTargetRaw = deployTargetIterator.next();
                Optional<DeployTarget> deployTargetOpt = DeployTarget.parseTarget(deployTargetRaw);
                if (deployTargetOpt.isEmpty()) {
                    throw new IllegalArgumentException("Unknown deploy target: " + deployTargetRaw);
                }
                deployTarget = deployTargetOpt.get();
                break;
            }
        }

        Iterator<String> deployLocalIterator = args.iterator();
        while (deployLocalIterator.hasNext()) {
            if (deployLocalIterator.next().equals("--local")) {
                if (!deployLocalIterator.hasNext()) {
                    throw new IllegalArgumentException("Missing local hyper bee hive key.");
                }
                println("");
                println("Deploying to local hyper drive");
                deployLocal(projectRoot, deployLocalIterator.next(), deployTarget);
                break;
            }
        }
    }
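
    // Deploy target selection: POOP (default) deploys only the project's own modules
    // (groupId under the project groupId), LIBS deploys the third-party dependencies,
    // ALL deploys both.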
    enum DeployTarget {
        ALL, POOP, LIBS;

        public static Optional<DeployTarget> parseTarget(String target) {
            for (DeployTarget v : values()) {
                if (v.name().equalsIgnoreCase(target)) {
                    return Optional.of(v);
                }
            }
            return Optional.empty();
        }
    }
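
    // Prefer offline resolution from the local Maven repository; fall back to the
    // default resolver when the offline configuration cannot be loaded.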
    private ConfigurableMavenResolverSystem startMaven() {
        try {
            return Maven.configureResolver().workOffline();
        } catch (InvalidConfigurationFileException ex) {
            return Maven.configureResolver(); // fallback to online if offline fails.
        }
    }
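
    // Local deploy pipeline: resolve the runtime dependencies from pom.xml, split them
    // into project modules and third-party libs, strip .DSA/.SF signature entries,
    // rewrite META-INF/MANIFEST.MF into the warp manifest formats, export the cleaned
    // jars to target/test-jar3/, push them into the hyper bee hive, and mirror the
    // companion files of third-party libs from Maven Central.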
    private void deployLocal(File projectRoot, String beehive, DeployTarget deployTarget) throws Exception {

        File pomFile = new File(projectRoot, "pom.xml");
        ConfigurableMavenResolverSystem maven = startMaven();
        PomEquippedResolveStage mavenPomStage = maven.loadPomFromFile(pomFile);
        MavenFormatStage mavenFormatStage = mavenPomStage.importRuntimeDependencies().resolve().withTransitivity();

        //maven.resolve("").withoutTransitivity().asSingleResolvedArtifact()

        MavenResolvedArtifact projectArtifact = mavenFormatStage.asResolvedArtifact()[0]; // FIXME: error this is not root!!!

        System.out.println("groupId; " + projectArtifact.getCoordinate().getGroupId());
        System.out.println("artifactId; " + projectArtifact.getCoordinate().getArtifactId());

        String projectGroupId = projectArtifact.getCoordinate().getGroupId();
        JavaArchive[] libs = mavenFormatStage.as(JavaArchive.class);

        HyperBeeHive hive = new HyperBeeHive(beehive);
        HyperFileSystem jdat = new HyperLocalFileSystem();

        List<JavaArchive> deployAll = new ArrayList<>();
        List<JavaArchive> deployPoop = new ArrayList<>();
        List<JavaArchive> deployLibs = new ArrayList<>();

        for (JavaArchive lib : libs) {
            deployAll.add(lib);
            for (ArchivePath path : lib.getContent().keySet()) {
                String file = path.get();
                if (file.endsWith("pom.properties")) {
                    try (InputStream in = lib.get(path).getAsset().openStream()) {
                        Properties p = new Properties();
                        p.load(in);
                        String libGroupId = p.getProperty("groupId");
                        if (libGroupId.startsWith(projectGroupId)) {
                            deployPoop.add(lib);
                        } else {
                            deployLibs.add(lib); // TODO: To migrate central create uber size dep all lib project + filter old
                        }
                    }
                }
            }
        }
        System.out.println("deployAll.size=" + deployAll.size());
        System.out.println("deployPoop.size=" + deployPoop.size());
        System.out.println("deployLibs.size=" + deployLibs.size());

        List<JavaArchive> deploy;
        if (DeployTarget.ALL.equals(deployTarget)) {
            deploy = deployAll;
        } else if (DeployTarget.LIBS.equals(deployTarget)) {
            deploy = deployLibs;
        } else {
            deploy = deployPoop;
        }
        System.out.println("deploy.target=" + deployTarget + " size=" + deploy.size());

        // Cleaning
        String toiletCleanDate = DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.ofInstant(Instant.now(), ZoneOffset.UTC));
        int deletedCerts = 0;
        int convertedManifests = 0;
        for (JavaArchive jar : deploy) {
            for (ArchivePath path : jar.getContent().keySet()) {
                String file = path.get();
                if (file.endsWith(".DSA")) {
                    jar.delete(path);
                    deletedCerts++;
                }
                if (file.endsWith(".SF")) {
                    jar.delete(path);
                    deletedCerts++;
                }
                if (file.equals("/META-INF/MANIFEST.MF")) {
                    System.out.println("jar: " + jar.getName());
                    InputStream in = jar.get(path).getAsset().openStream();
                    WarpManifestX8 manifestX8 = WarpManifestorDriver.readV1Stream(in);
                    in.close();
                    jar.delete(path);
                    manifestX8.withAttribute("Manifest-Toilet-Cleaned-By", Java3Seed.class.getName());
                    manifestX8.withAttribute("Manifest-Toilet-Cleaned-Date", toiletCleanDate);
                    WarpManifestX18 manifestX18 = WarpManifestX18Import.fromX8(manifestX8);
                    jar.add(new StringAsset(WarpManifestorDriver.writeV1String(manifestX8)), "/META-INF/" + WarpManifestX0TheMimeType.MANIFEST_1.getQFileName());
                    jar.add(new StringAsset(WarpManifestorDriver.writeV2String(manifestX8)), "/META-INF/" + WarpManifestX0TheMimeType.MANIFEST_2.getQFileName());
                    jar.add(new ByteArrayAsset(WarpManifestorDriver.writeV3Array(manifestX8)), "/META-INF/" + WarpManifestX0TheMimeType.MANIFEST_3.getQFileName());
                    jar.add(new ByteArrayAsset(WarpManifestorDriver.writeV4Array(manifestX18)), "/META-INF/" + WarpManifestX0TheMimeType.MANIFEST_4.getQFileName());
                    jar.add(new ByteArrayAsset(WarpManifestorDriver.writeV5Array(manifestX8)), "/META-INF/" + WarpManifestX0TheMimeType.MANIFEST_5.getQFileName());
                    convertedManifests++;
                }
            }
            new File("target/test-jar3/").mkdir();
            File jar3Export = new File("target/test-jar3/" + jar.getName());
            jar.as(ZipExporter.class).exportTo(jar3Export, true);
        }
        System.out.println("Deleted certs: " + deletedCerts);
        System.out.println("Converted manifests: " + convertedManifests);

        WireFetchFactory wire = new WireFetchFactoryDefault();

        for (JavaArchive jar : deploy) {
            File jar3Export = new File("target/test-jar3/" + jar.getName());
            for (ArchivePath path : jar.getContent().keySet()) {
                String file = path.get();
                if (file.endsWith("pom.properties")) {
                    try (InputStream in = jar.get(path).getAsset().openStream()) {
                        Properties p = new Properties();
                        p.load(in);
                        String libArtifactId = p.getProperty("artifactId");
                        String libGroupId = p.getProperty("groupId");
                        String libVersion = p.getProperty("version");
                        System.out.println("Deploying: " + libGroupId + "/" + libArtifactId + "@" + libVersion);

                        String artifactFolder = libGroupId.replaceAll("\\.|_", "/") + "/" + libArtifactId + "/" + libVersion + "/";
                        String artifactFile = artifactFolder + libArtifactId + "-" + libVersion + ".jar";
                        ByteBuffer artifactBuffer = ByteBuffer.wrap(Files.readAllBytes(jar3Export.toPath()));

                        jdat.put(hive, hive.createHyperLint(artifactFile), artifactBuffer);

                        if (libGroupId.startsWith(projectGroupId)) {
                            continue;
                        }
                        copyFromCentral(jdat, hive, wire, artifactFolder, libArtifactId, libVersion);
                    }
                }
            }
        }

        HyperFlower flower = jdat.eden(hive);
        for (HyperFlowerAnther anther : flower.getAnthers()) {
            System.out.println("anther: " + anther.toString());
        }
        System.out.println("Deployment done to: " + hive.getKey());
    }
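
    // Mirror the companion files of a third-party artifact (signatures, checksums, pom,
    // sources and javadoc jars) from Maven Central into the hyper bee hive.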
    private void copyFromCentral(HyperFileSystem jdat, HyperBeeHive hive, WireFetchFactory wire, String artifactFolder, String artifact, String version) throws URISyntaxException {
        String central = "https://repo1.maven.org/maven2/";
        String centralBase = central + artifactFolder + artifact + "-" + version;
        String flowerBase = artifactFolder + artifact + "-" + version;
        String[] copyTypes = new String[] {
                ".jar.asc",
                ".jar.asc.md5",
                ".jar.asc.sha1",
                ".jar.md5",
                ".jar.sha1",
                ".pom",
                ".pom.asc",
                ".pom.asc.md5",
                ".pom.asc.sha1",
                ".pom.md5",
                ".pom.sha1",
                "-sources.jar",
                "-sources.jar.asc",
                "-sources.jar.asc.md5",
                "-sources.jar.asc.sha1",
                "-sources.jar.md5",
                "-sources.jar.sha1",
                "-javadoc.jar",
                "-javadoc.jar.asc",
                "-javadoc.jar.asc.md5",
                "-javadoc.jar.asc.sha1",
                "-javadoc.jar.md5",
                "-javadoc.jar.sha1",
        };
        for (String copyType : copyTypes) {
            copyResource(jdat, hive, wire, centralBase + copyType, flowerBase + copyType);
        }
    }
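
    // Fetch a single resource over the wire and store the received bytes in the hive;
    // resources that are not present on central (FileNotFoundException) are only logged.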
    private void copyResource(HyperFileSystem jdat, HyperBeeHive hive, WireFetchFactory wire, String resource, String file) throws URISyntaxException {
        System.out.println("Fetch: " + resource + " target: " + file);
        WireFetch result = wire.buildFetcher(new WireFetchResource(new URI(resource)), new WireFetchHandler() {

            @Override
            public void onStart() {
            }

            @Override
            public void onReady(ByteBuffer result) {
                jdat.put(hive, hive.createHyperLint(file), result);
            }

            @Override
            public void onProgress(int totalBytes) {
            }

            @Override
            public void onError(Throwable error) {
                if (error instanceof FileNotFoundException) {
                    System.out.println("404: " + resource);
                } else {
                    error.printStackTrace();
                }
            }
        });
        result.run();
    }
}

@@ -0,0 +1,6 @@
love.distributedrebirth.nx01.warp.fault.report.ReportSitraFaultStackTraceJava3
love.distributedrebirth.nx01.warp.fault.report.ReportSitraFaultWarpVersion
love.distributedrebirth.nx01.warp.fault.report.ReportSitraRuntimeChapter
love.distributedrebirth.nx01.warp.fault.report.ReportSitraRuntimeThreads
love.distributedrebirth.nx01.warp.fault.report.ReportSitraSystemChapter
love.distributedrebirth.nx01.warp.fault.report.ReportSitraSystemLocale