package net.oni2.aeinstaller.backend.depot;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Vector;

import net.oni2.aeinstaller.backend.Settings;
import net.oni2.aeinstaller.backend.Settings.Platform;
import net.oni2.aeinstaller.backend.depot.model.File;
import net.oni2.aeinstaller.backend.depot.model.Node;
import net.oni2.aeinstaller.backend.depot.model.NodeField_Body;
import net.oni2.aeinstaller.backend.depot.model.NodeField_Upload;
import net.oni2.aeinstaller.backend.depot.model.NodeMod;
import net.oni2.aeinstaller.backend.depot.model.TaxonomyTerm;
import net.oni2.aeinstaller.backend.depot.model.TaxonomyVocabulary;
import net.oni2.aeinstaller.backend.mods.ECompatiblePlatform;
import net.oni2.aeinstaller.backend.network.DrupalJSONQuery;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.StaxDriver;

/**
 * @author Christian Illy
 */
public class DepotManager {
	private static DepotManager instance = new DepotManager();

	private HashMap<Integer, TaxonomyVocabulary> taxonomyVocabulary = new HashMap<Integer, TaxonomyVocabulary>();
	private HashMap<Integer, TaxonomyTerm> taxonomyTerms = new HashMap<Integer, TaxonomyTerm>();
	private HashMap<Integer, Node> nodes = new HashMap<Integer, Node>();
	private HashMap<String, HashMap<Integer, Node>> nodesByType = new HashMap<String, HashMap<Integer, Node>>();
	private HashMap<Integer, File> files = new HashMap<Integer, File>();

	private int vocabId_type = -1;
	private int vocabId_platform = -1;
	private int vocabId_instmethod = -1;

	/**
	 * @return Singleton instance
	 */
	public static DepotManager getInstance() {
		return instance;
	}

	/**
	 * Update local Depot information cache
	 * 
	 * @param forceRefreshAll
	 *            Force refreshing all data, even if it seems to be cached
	 * @param listener
	 *            Listener for update status
	 */
	public void updateInformation(boolean forceRefreshAll,
			DepotCacheUpdateProgressListener listener) {
		taxonomyTerms.clear();
		taxonomyVocabulary.clear();

		// Keep the previous node/file caches around so unchanged entries can
		// be reused; an empty "old" cache forces a full refresh.
		HashMap<Integer, Node> oldNodes = null;
		HashMap<Integer, File> oldFiles = null;
		if (forceRefreshAll) {
			oldNodes = new HashMap<Integer, Node>();
			oldFiles = new HashMap<Integer, File>();
		} else {
			oldNodes = nodes;
			oldFiles = files;
		}
		nodes = new HashMap<Integer, Node>();
		nodesByType = new HashMap<String, HashMap<Integer, Node>>();
		files = new HashMap<Integer, File>();

		try {
			JSONArray ja;
			JSONObject jo;
			int page;

			// Get taxonomy vocabulary (paged index, 100 entries per page)
			if (listener != null)
				listener.cacheUpdateProgress("Updating taxonomy vocabulary",
						0, 100);
			page = 0;
			do {
				ja = DrupalJSONQuery.getIndex("taxonomy_vocabulary", page, 100);
				for (int i = 0; i < ja.length(); i++) {
					jo = ja.getJSONObject(i);
					TaxonomyVocabulary tv = new TaxonomyVocabulary(jo);
					taxonomyVocabulary.put(tv.getVid(), tv);
				}
				page++;
			} while (ja.length() > 0);

			// Get taxonomy terms
			if (listener != null)
				listener.cacheUpdateProgress("Updating taxonomy terms", 0, 100);
			page = 0;
			do {
				ja = DrupalJSONQuery.getIndex("taxonomy_term", page, 100);
				for (int i = 0; i < ja.length(); i++) {
					jo = ja.getJSONObject(i);
					TaxonomyTerm tt = new TaxonomyTerm(jo);
					taxonomyTerms.put(tt.getTid(), tt);
				}
				page++;
			} while (ja.length() > 0);

			// Check nodes for new information: reuse cached nodes whose
			// remote "changed" timestamp is not newer than the cached one.
			if (listener != null)
				listener.cacheUpdateProgress("Checking for new/updated nodes",
						1, 100);
			HashSet<Integer> nodesToUpdate = new HashSet<Integer>();
			page = 0;
			do {
				ja = DrupalJSONQuery.getIndex("node", page, 500);
				for (int i = 0; i < ja.length(); i++) {
					jo = ja.getJSONObject(i);
					int nid = jo.getInt("nid");
					long changedRemote = jo.getLong("changed");
					if (oldNodes.containsKey(nid)) {
						if (changedRemote > oldNodes.get(nid).getChanged())
							nodesToUpdate.add(nid);
						else {
							Node n = oldNodes.get(nid);
							nodes.put(nid, n);
							if (!nodesByType.containsKey(n.getType()))
								nodesByType.put(n.getType(),
										new HashMap<Integer, Node>());
							nodesByType.get(n.getType()).put(nid, n);
						}
					} else {
						nodesToUpdate.add(nid);
					}
				}
				page++;
			} while (ja.length() > 0);

			// Check files for new stuff
			if (listener != null)
				listener.cacheUpdateProgress("Checking for new/updated files",
						2, 100);
			HashSet<Integer> filesToUpdate = new HashSet<Integer>();
			page = 0;
			do {
				ja = DrupalJSONQuery.getIndex("file", page, 500);
				for (int i = 0; i < ja.length(); i++) {
					jo = ja.getJSONObject(i);
					int fid = jo.getInt("fid");
					long changedRemote = jo.getLong("timestamp");
					if (oldFiles.containsKey(fid)) {
						if (changedRemote > oldFiles.get(fid).getTimestamp())
							filesToUpdate.add(fid);
						else
							files.put(fid, oldFiles.get(fid));
					} else {
						filesToUpdate.add(fid);
					}
				}
				page++;
			} while (ja.length() > 0);

			int total = nodesToUpdate.size() + filesToUpdate.size() + 3;
			int step = 3;

			// Update nodes with new information
			for (int nid : nodesToUpdate) {
				if (listener != null)
					listener.cacheUpdateProgress("Updating nodes", step++,
							total);
				ja = DrupalJSONQuery.getItem("node", nid, "");
				jo = ja.getJSONObject(0);
				String type = jo.getString("type");
				Node n = null;
				if (type.equalsIgnoreCase(DepotConfig.getNodeType_Mod()))
					n = new NodeMod(jo);
				else
					n = new Node(jo);
				nodes.put(nid, n);
				if (!nodesByType.containsKey(type))
					nodesByType.put(type, new HashMap<Integer, Node>());
				nodesByType.get(type).put(nid, n);
			}

			// Update new files (metadata only, no file contents)
			for (int fid : filesToUpdate) {
				if (listener != null)
					listener.cacheUpdateProgress("Updating files", step++,
							total);
				ja = DrupalJSONQuery.getItem("file", fid, "&file_contents=0");
				jo = ja.getJSONObject(0);
				File f = new File(jo);
				files.put(fid, f);
			}

			vocabId_type = getVocabulary(
					DepotConfig.getVocabularyName_ModType()).getVid();
			vocabId_platform = getVocabulary(
					DepotConfig.getVocabularyName_Platform()).getVid();
			vocabId_instmethod = getVocabulary(
					DepotConfig.getVocabularyName_InstallType()).getVid();
		} catch (JSONException e) {
			e.printStackTrace();
		} catch (Exception e) {
			System.err.println(e.getMessage());
			e.printStackTrace();
		}
	}

	/**
	 * @return All TaxVocabs
	 */
	public Vector<TaxonomyVocabulary> getVocabulary() {
		return new Vector<TaxonomyVocabulary>(taxonomyVocabulary.values());
	}

	/**
	 * Get taxonomy vocabulary by given ID
	 * 
	 * @param id
	 *            Vocabulary ID
	 * @return TaxVocab
	 */
	public TaxonomyVocabulary getVocabulary(int id) {
		return taxonomyVocabulary.get(id);
	}

	/**
	 * Get taxonomy vocabulary by given name
	 * 
	 * @param name
	 *            Vocabulary name
	 * @return TaxVocab
	 */
	public TaxonomyVocabulary getVocabulary(String name) {
		for (TaxonomyVocabulary v : taxonomyVocabulary.values()) {
			if (v.getName().equalsIgnoreCase(name))
				return v;
		}
		return null;
	}

	/**
	 * Get all taxonomy terms of a given vocabulary
	 * 
	 * @param vocabId
	 *            Vocabulary ID
	 * @return TaxTerms
	 */
	public Vector<TaxonomyTerm> getTaxonomyTermsByVocabulary(int vocabId) {
		Vector<TaxonomyTerm> res = new Vector<TaxonomyTerm>();
		for (TaxonomyTerm t : taxonomyTerms.values()) {
			if (t.getVid() == vocabId)
				res.add(t);
		}
		return res;
	}

	/**
	 * Get taxonomy term by given ID
	 * 
	 * @param id
	 *            Term ID
	 * @return TaxTerm
	 */
	public TaxonomyTerm getTaxonomyTerm(int id) {
		return taxonomyTerms.get(id);
	}

	/**
	 * Get taxonomy term by given name
	 * 
	 * @param name
	 *            Term name
	 * @return TaxTerm
	 */
	public TaxonomyTerm getTaxonomyTerm(String name) {
		for (TaxonomyTerm t : taxonomyTerms.values()) {
			if (t.getName().equalsIgnoreCase(name))
				return t;
		}
		return null;
	}

	/**
	 * Get all nodes of given node type
	 * 
	 * @param nodeType
	 *            Node type
	 * @return Nodes of type nodeType
	 */
	public Vector<Node> getNodesByType(String nodeType) {
		return new Vector<Node>(nodesByType.get(nodeType).values());
	}

	/**
	 * Get a node by node id
	 * 
	 * @param id
	 *            Node id
	 * @return Node
	 */
	public Node getNodeById(int id) {
		return nodes.get(id);
	}

	/**
	 * Get a Mod-Node by a given package number
	 * 
	 * @param packageNumber
	 *            Package number to find
	 * @return The Mod-Node or null
	 */
	public NodeMod getNodeByPackageNumber(int packageNumber) {
		Vector<Node> modNodes = getNodesByType(DepotConfig.getNodeType_Mod());
		for (Node n : modNodes) {
			if (n instanceof NodeMod) {
				NodeMod nm = (NodeMod) n;
				if (nm.getPackageNumber() == packageNumber)
					return nm;
			}
		}
		return null;
	}

	/**
	 * @return Mod-Nodes
	 */
	public Vector<NodeMod> getModPackageNodes() {
		Vector<NodeMod> result = new Vector<NodeMod>();
		String instMethName = DepotConfig.getTaxonomyName_InstallType_Package();
		Vector<Node> modNodes = getNodesByType(DepotConfig.getNodeType_Mod());
		for (Node n : modNodes) {
			if (n instanceof NodeMod) {
				NodeMod nm = (NodeMod) n;
				if (nm.getInstallMethod().getName()
						.equalsIgnoreCase(instMethName))
					result.add(nm);
			}
		}
		return result;
	}

	/**
	 * @param node
	 *            Node to check validity on
	 * @param platform
	 *            Platform to check against
	 * @return True if valid on platform
	 */
	public boolean isModValidOnPlatform(NodeMod node,
			Settings.Platform platform) {
		ECompatiblePlatform plat = node.getPlatform();
		switch (plat) {
			case BOTH:
				return true;
			case WIN:
				// Windows mod packages are also accepted on Linux
				return (platform == Platform.WIN)
						|| (platform == Platform.LINUX);
			case MACOS:
				return (platform == Platform.MACOS);
		}
		return false;
	}

	/**
	 * Checks if the given mod-node is of the given mod-type(s)
	 * 
	 * @param node
	 *            Node to check
	 * @param type
	 *            Type(s) to check
	 * @param or
	 *            If false, checks if all given types are included in the node.
	 *            If true, checks if any of the given types is included.
	 * @return True if of given type(s)
	 */
	public boolean isModOfType(NodeMod node, HashSet<Integer> type, boolean or) {
		// Start with false for OR (any match sets it), true for AND
		// (any miss clears it).
		boolean matching = !or;
		HashSet<Integer> terms = node.getTypes();
		for (int t : type) {
			if (or)
				matching |= terms.contains(t);
			else
				matching &= terms.contains(t);
		}
		return matching;
	}

	/**
	 * @return VocabId of Platform vocabulary
	 */
	public int getVocabIdPlatform() {
		return vocabId_platform;
	}

	/**
	 * @return VocabId of Install method vocabulary
	 */
	public int getVocabIdInstMethod() {
		return vocabId_instmethod;
	}

	/**
	 * @return VocabId of Type vocabulary
	 */
	public int getVocabIdType() {
		return vocabId_type;
	}

	/**
	 * @param id
	 *            ID of file to get
	 * @return the file
	 */
	public File getFile(int id) {
		return files.get(id);
	}

	/**
	 * Print stats about nodes and files
	 */
	public void printStats() {
		System.out.println("Nodes by type:");
		for (String t : nodesByType.keySet()) {
			System.out.println("  " + t + ": " + nodesByType.get(t).size());
		}
		System.out.println("Files: " + files.size());
	}

	private XStream getXStream() {
		XStream xs = new XStream(new StaxDriver());
		xs.alias("Depot", DepotManager.class);
		xs.alias("File", net.oni2.aeinstaller.backend.depot.model.File.class);
		xs.alias("Node", Node.class);
		xs.alias("NodeField_Body", NodeField_Body.class);
		xs.alias("NodeField_Upload", NodeField_Upload.class);
		xs.alias("NodeMod", NodeMod.class);
		xs.alias("TaxonomyTerm", TaxonomyTerm.class);
		xs.alias("TaxonomyVocabulary", TaxonomyVocabulary.class);
		return xs;
	}

	/**
	 * Save Depot cache instance to file
	 * 
	 * @param f
	 *            File to write to
	 */
	public void saveToFile(java.io.File f) {
		try {
			FileOutputStream fos = new FileOutputStream(f);
			XStream xs = getXStream();
			xs.toXML(this, fos);
			fos.close();
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Load Depot cache instance from file
	 * 
	 * @param f
	 *            File to read from
	 */
	public void loadFromFile(java.io.File f) {
		try {
			FileInputStream fis = new FileInputStream(f);
			XStream xs = getXStream();
			Object obj = xs.fromXML(fis);
			if (obj instanceof DepotManager)
				instance = (DepotManager) obj;
			fis.close();
		} catch (FileNotFoundException e) {
			// No cache file yet; keep the current (empty) instance
		} catch (IOException e) {
			// Unreadable cache; keep the current instance
		}
	}
}
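
/*
 * Usage sketch (added for illustration, not part of the original class): a
 * minimal flow that refreshes the Depot cache without a progress listener,
 * prints some statistics, and persists the cache via XStream. The cache file
 * name "depot_cache.xml" is an assumed example value.
 */
class DepotManagerUsageExample {
	public static void main(String[] args) {
		DepotManager depot = DepotManager.getInstance();

		// Pull taxonomy, node and file metadata from the Depot; a null
		// listener is explicitly allowed by updateInformation().
		depot.updateInformation(false, null);
		depot.printStats();

		// List the package numbers of all installable mod packages.
		for (NodeMod nm : depot.getModPackageNodes())
			System.out.println("Package: " + nm.getPackageNumber());

		// Persist the cache and reload it into the singleton instance.
		java.io.File cacheFile = new java.io.File("depot_cache.xml");
		depot.saveToFile(cacheFile);
		DepotManager.getInstance().loadFromFile(cacheFile);
	}
}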