mirror of https://github.com/Athou/commafeed.git
admin cleanup interface
@@ -10,13 +10,11 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.commafeed.backend.dao.FeedDAO;
-import com.commafeed.backend.dao.FeedDAO.FeedCount;
 import com.commafeed.backend.dao.FeedEntryDAO;
 import com.commafeed.backend.dao.FeedSubscriptionDAO;
 import com.commafeed.backend.model.Feed;
 import com.commafeed.backend.model.FeedSubscription;
-import com.commafeed.backend.services.ApplicationSettingsService;
 import com.google.common.collect.Lists;
 
 public class DatabaseCleaner {
 
@@ -75,44 +73,6 @@ public class DatabaseCleaner {
         return total;
     }
 
-    public long cleanDuplicateFeeds() {
-        long total = 0;
-        int deleted = -1;
-        do {
-            List<FeedCount> fcs = feedDAO
-                    .findDuplicates(0, applicationSettingsService.get()
-                            .getDatabaseUpdateThreads(), 1);
-            deleted = fcs.size();
-
-            List<Thread> threads = Lists.newArrayList();
-            for (final FeedCount fc : fcs) {
-                Thread thread = new Thread() {
-                    public void run() {
-                        Feed into = feedDAO.findById(fc.feeds.get(0).getId());
-                        List<Feed> feeds = Lists.newArrayList();
-                        for (Feed feed : fc.feeds) {
-                            feeds.add(feedDAO.findById(feed.getId()));
-                        }
-                        mergeFeeds(into, feeds);
-                    };
-                };
-                thread.start();
-                threads.add(thread);
-            }
-            for (Thread thread : threads) {
-                try {
-                    thread.join();
-                } catch (InterruptedException e) {
-                    log.error(e.getMessage(), e);
-                }
-            }
-            total += deleted;
-            log.info("merged {} feeds", total);
-        } while (deleted != 0);
-        log.info("cleanup done: {} feeds merged", total);
-        return total;
-    }
-
     public void mergeFeeds(Feed into, List<Feed> feeds) {
         for (Feed feed : feeds) {
             if (into.getId().equals(feed.getId())) {
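
Note: the removed cleanDuplicateFeeds() looped until findDuplicates() returned
no more groups, spawning one thread per duplicate group and joining them all
before the next pass. With this commit, merging is driven manually through the
admin endpoints instead. A minimal sketch of that flow, assuming injected
FeedDAO and DatabaseCleaner instances and the DuplicateMode-based DAO signature
introduced below; this is an illustration, not code from the commit:

    import java.util.List;

    import com.commafeed.backend.DatabaseCleaner;
    import com.commafeed.backend.dao.FeedDAO;
    import com.commafeed.backend.dao.FeedDAO.DuplicateMode;
    import com.commafeed.backend.dao.FeedDAO.FeedCount;
    import com.commafeed.backend.model.Feed;
    import com.google.common.collect.Lists;

    public class ManualCleanupSketch {

        private FeedDAO feedDAO;         // assumed injected
        private DatabaseCleaner cleaner; // assumed injected

        // One pass over the first page of duplicate groups, mirroring the
        // body of the removed run() above but on the caller's thread.
        public void mergeFirstPage() {
            List<FeedCount> fcs = feedDAO.findDuplicates(
                    DuplicateMode.NORMALIZED_URL, 0, 10, 1);
            for (FeedCount fc : fcs) {
                // keep the first feed of each group, merge the rest into it
                Feed into = feedDAO.findById(fc.feeds.get(0).getId());
                List<Feed> feeds = Lists.newArrayList();
                for (Feed feed : fc.feeds) {
                    feeds.add(feedDAO.findById(feed.getId()));
                }
                cleaner.mergeFeeds(into, feeds);
            }
        }
    }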
@@ -13,6 +13,7 @@ import javax.persistence.criteria.Path;
 import javax.persistence.criteria.Predicate;
 import javax.persistence.criteria.Root;
 import javax.persistence.criteria.SetJoin;
+import javax.persistence.metamodel.SingularAttribute;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlRootElement;
@@ -34,7 +35,7 @@ public class FeedDAO extends GenericDAO<Feed> {
     @XmlRootElement
     @XmlAccessorType(XmlAccessType.FIELD)
     public static class FeedCount {
-        public String normalizedUrlHash;
+        public String value;
         public List<Feed> feeds;
     }
 
@@ -136,28 +137,43 @@ public class FeedDAO extends GenericDAO<Feed> {
 
     }
 
-    public List<FeedCount> findDuplicates(int offset, int limit, long minCount) {
+    public static enum DuplicateMode {
+        NORMALIZED_URL(Feed_.normalizedUrlHash), LAST_CONTENT(
+                Feed_.lastContentHash), PUSH_TOPIC(Feed_.pushTopicHash);
+        private SingularAttribute<Feed, String> path;
+
+        private DuplicateMode(SingularAttribute<Feed, String> path) {
+            this.path = path;
+        }
+
+        public SingularAttribute<Feed, String> getPath() {
+            return path;
+        }
+    }
+
+    public List<FeedCount> findDuplicates(DuplicateMode mode, int offset,
+            int limit, long minCount) {
         CriteriaQuery<String> query = builder.createQuery(String.class);
         Root<Feed> root = query.from(getType());
 
-        Path<String> hashPath = root.get(Feed_.normalizedUrlHash);
-        Expression<Long> count = builder.count(hashPath);
+        Path<String> path = root.get(mode.getPath());
+        Expression<Long> count = builder.count(path);
 
-        query.select(hashPath);
+        query.select(path);
 
-        query.groupBy(hashPath);
+        query.groupBy(path);
         query.having(builder.greaterThan(count, minCount));
 
         TypedQuery<String> q = em.createQuery(query);
         limit(q, offset, limit);
-        List<String> normalizedUrlHashes = q.getResultList();
+        List<String> pathValues = q.getResultList();
 
         List<FeedCount> result = Lists.newArrayList();
-        for (String hash : normalizedUrlHashes) {
+        for (String pathValue : pathValues) {
             FeedCount fc = new FeedCount();
-            fc.normalizedUrlHash = hash;
+            fc.value = pathValue;
             fc.feeds = Lists.newArrayList();
-            for (Feed feed : findByField(Feed_.normalizedUrlHash, hash)) {
+            for (Feed feed : findByField(mode.getPath(), pathValue)) {
                 Feed f = new Feed();
                 f.setId(feed.getId());
                 f.setUrl(feed.getUrl());
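
Note: the rewritten findDuplicates() works in two phases: a GROUP BY / HAVING
query returns the hash values that occur more than minCount times in the
column selected by the mode, then each value is expanded into its feeds via
findByField(). A minimal usage sketch, assuming an injected FeedDAO instance;
the SQL in the comment is only an approximation of what the criteria query
emits:

    // Approximate SQL for the grouping phase in LAST_CONTENT mode:
    //   SELECT lastContentHash FROM Feed
    //   GROUP BY lastContentHash
    //   HAVING COUNT(lastContentHash) > :minCount
    List<FeedCount> dups = feedDAO.findDuplicates(DuplicateMode.LAST_CONTENT, 0, 20, 2);
    for (FeedCount fc : dups) {
        System.out.println(fc.value + " is shared by " + fc.feeds.size() + " feeds");
    }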
@@ -21,6 +21,7 @@ import com.commafeed.backend.DatabaseCleaner;
 import com.commafeed.backend.MetricsBean;
 import com.commafeed.backend.StartupBean;
 import com.commafeed.backend.dao.FeedDAO;
+import com.commafeed.backend.dao.FeedDAO.DuplicateMode;
 import com.commafeed.backend.dao.FeedDAO.FeedCount;
 import com.commafeed.backend.dao.UserDAO;
 import com.commafeed.backend.dao.UserRoleDAO;
@@ -272,9 +273,11 @@ public class AdminREST extends AbstractResourceREST {
     @Path("/cleanup/findDuplicateFeeds")
     @GET
     @ApiOperation(value = "Find duplicate feeds")
-    public Response findDuplicateFeeds(@QueryParam("page") int page,
-            @QueryParam("limit") int limit, @QueryParam("minCount") long minCount) {
-        List<FeedCount> list = feedDAO.findDuplicates(limit * page, limit, minCount);
+    public Response findDuplicateFeeds(@QueryParam("mode") DuplicateMode mode,
+            @QueryParam("page") int page, @QueryParam("limit") int limit,
+            @QueryParam("minCount") long minCount) {
+        List<FeedCount> list = feedDAO.findDuplicates(mode, limit * page,
+                limit, minCount);
         return Response.ok(list).build();
     }
 
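
Note: JAX-RS binds the new mode query parameter to the DuplicateMode enum by
constant name (NORMALIZED_URL, LAST_CONTENT or PUSH_TOPIC). A hypothetical
call using the standard JAX-RS 2.0 client; the base URL and the /rest/admin
mount point are assumptions, not taken from this commit:

    import java.util.List;

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.core.GenericType;
    import javax.ws.rs.core.MediaType;

    import com.commafeed.backend.dao.FeedDAO.FeedCount;

    public class FindDuplicatesClientSketch {
        public static void main(String[] args) {
            Client client = ClientBuilder.newClient();
            // ?mode=NORMALIZED_URL is converted to DuplicateMode.NORMALIZED_URL
            List<FeedCount> dups = client
                    .target("http://localhost:8080/rest/admin/cleanup/findDuplicateFeeds")
                    .queryParam("mode", "NORMALIZED_URL")
                    .queryParam("page", 0)
                    .queryParam("limit", 10)
                    .queryParam("minCount", 2)
                    .request(MediaType.APPLICATION_JSON)
                    .get(new GenericType<List<FeedCount>>() {});
            dups.forEach(fc -> System.out.println(fc.value + ": " + fc.feeds.size() + " feeds"));
            client.close();
        }
    }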
@@ -303,15 +306,4 @@ public class AdminREST extends AbstractResourceREST {
         cleaner.mergeFeeds(into, feeds);
         return Response.ok().build();
     }
 
-    @Path("/cleanup/automerge")
-    @GET
-    @ApiOperation(value = "Automatically merge feeds", notes = "Merge feeds together")
-    public Response autoMergeFeeds() {
-        Map<String, Long> map = Maps.newHashMap();
-        map.put("merged feeds",
-                cleaner.cleanDuplicateFeeds());
-        return Response.ok(map).build();
-    }
-
 }