admin cleanup interface

This commit is contained in:
Athou
2013-07-10 15:06:04 +02:00
parent c7d316e17b
commit be4b15be70
7 changed files with 55 additions and 67 deletions

View File

@@ -10,13 +10,11 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.commafeed.backend.dao.FeedDAO;
import com.commafeed.backend.dao.FeedDAO.FeedCount;
import com.commafeed.backend.dao.FeedEntryDAO;
import com.commafeed.backend.dao.FeedSubscriptionDAO;
import com.commafeed.backend.model.Feed;
import com.commafeed.backend.model.FeedSubscription;
import com.commafeed.backend.services.ApplicationSettingsService;
import com.google.common.collect.Lists;
public class DatabaseCleaner {
@@ -75,44 +73,6 @@ public class DatabaseCleaner {
return total;
}
/**
 * Finds groups of duplicate feeds and merges each group into a single feed,
 * looping until no duplicate group remains.
 *
 * Each batch is processed by one thread per duplicate group; batch size is
 * the configured number of database update threads.
 *
 * @return the total number of duplicate groups merged
 */
public long cleanDuplicateFeeds() {
long total = 0;
int deleted = -1;
do {
// minCount=1 keeps only hashes shared by more than one feed
List<FeedCount> fcs = feedDAO
.findDuplicates(0, applicationSettingsService.get()
.getDatabaseUpdateThreads(), 1);
deleted = fcs.size();
List<Thread> threads = Lists.newArrayList();
for (final FeedCount fc : fcs) {
Thread thread = new Thread() {
@Override
public void run() {
// merge every feed of the group into the first one
Feed into = feedDAO.findById(fc.feeds.get(0).getId());
List<Feed> feeds = Lists.newArrayList();
for (Feed feed : fc.feeds) {
feeds.add(feedDAO.findById(feed.getId()));
}
mergeFeeds(into, feeds);
}
};
thread.start();
threads.add(thread);
}
// wait for the whole batch before fetching the next one
for (Thread thread : threads) {
try {
thread.join();
} catch (InterruptedException e) {
// restore the interrupt flag instead of swallowing it
Thread.currentThread().interrupt();
log.error(e.getMessage(), e);
}
}
total += deleted;
log.info("merged {} feeds", total);
} while (deleted != 0);
log.info("cleanup done: {} feeds merged", total);
return total;
}
public void mergeFeeds(Feed into, List<Feed> feeds) {
for (Feed feed : feeds) {
if (into.getId().equals(feed.getId())) {

View File

@@ -13,6 +13,7 @@ import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.persistence.criteria.SetJoin;
import javax.persistence.metamodel.SingularAttribute;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
@@ -34,7 +35,7 @@ public class FeedDAO extends GenericDAO<Feed> {
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public static class FeedCount {
public String normalizedUrlHash;
public String value;
public List<Feed> feeds;
}
@@ -136,28 +137,43 @@ public class FeedDAO extends GenericDAO<Feed> {
}
public List<FeedCount> findDuplicates(int offset, int limit, long minCount) {
/**
 * Criterion used to detect duplicate feeds: two feeds are considered
 * duplicates when they share the same value for the selected hash column.
 */
public static enum DuplicateMode {
NORMALIZED_URL(Feed_.normalizedUrlHash), LAST_CONTENT(
Feed_.lastContentHash), PUSH_TOPIC(Feed_.pushTopicHash);

// metamodel attribute holding the hash compared for this mode
private final SingularAttribute<Feed, String> path;

private DuplicateMode(SingularAttribute<Feed, String> path) {
this.path = path;
}

/** @return the JPA metamodel attribute to group/compare feeds by */
public SingularAttribute<Feed, String> getPath() {
return path;
}
}
public List<FeedCount> findDuplicates(DuplicateMode mode, int offset,
int limit, long minCount) {
CriteriaQuery<String> query = builder.createQuery(String.class);
Root<Feed> root = query.from(getType());
Path<String> hashPath = root.get(Feed_.normalizedUrlHash);
Expression<Long> count = builder.count(hashPath);
Path<String> path = root.get(mode.getPath());
Expression<Long> count = builder.count(path);
query.select(hashPath);
query.select(path);
query.groupBy(hashPath);
query.groupBy(path);
query.having(builder.greaterThan(count, minCount));
TypedQuery<String> q = em.createQuery(query);
limit(q, offset, limit);
List<String> normalizedUrlHashes = q.getResultList();
List<String> pathValues = q.getResultList();
List<FeedCount> result = Lists.newArrayList();
for (String hash : normalizedUrlHashes) {
for (String pathValue : pathValues) {
FeedCount fc = new FeedCount();
fc.normalizedUrlHash = hash;
fc.value = pathValue;
fc.feeds = Lists.newArrayList();
for (Feed feed : findByField(Feed_.normalizedUrlHash, hash)) {
for (Feed feed : findByField(mode.getPath(), pathValue)) {
Feed f = new Feed();
f.setId(feed.getId());
f.setUrl(feed.getUrl());

View File

@@ -21,6 +21,7 @@ import com.commafeed.backend.DatabaseCleaner;
import com.commafeed.backend.MetricsBean;
import com.commafeed.backend.StartupBean;
import com.commafeed.backend.dao.FeedDAO;
import com.commafeed.backend.dao.FeedDAO.DuplicateMode;
import com.commafeed.backend.dao.FeedDAO.FeedCount;
import com.commafeed.backend.dao.UserDAO;
import com.commafeed.backend.dao.UserRoleDAO;
@@ -272,9 +273,11 @@ public class AdminREST extends AbstractResourceREST {
@Path("/cleanup/findDuplicateFeeds")
@GET
@ApiOperation(value = "Find duplicate feeds")
public Response findDuplicateFeeds(@QueryParam("page") int page,
@QueryParam("limit") int limit, @QueryParam("minCount") long minCount) {
List<FeedCount> list = feedDAO.findDuplicates(limit * page, limit, minCount);
public Response findDuplicateFeeds(@QueryParam("mode") DuplicateMode mode,
@QueryParam("page") int page, @QueryParam("limit") int limit,
@QueryParam("minCount") long minCount) {
List<FeedCount> list = feedDAO.findDuplicates(mode, limit * page,
limit, minCount);
return Response.ok(list).build();
}
@@ -303,15 +306,4 @@ public class AdminREST extends AbstractResourceREST {
cleaner.mergeFeeds(into, feeds);
return Response.ok().build();
}
@Path("/cleanup/automerge")
@GET
@ApiOperation(value = "Automatically merge feeds", notes = "Merge feeds together")
public Response autoMergeFeeds() {
// run the duplicate-feed cleanup, then report how many groups were merged
long mergedCount = cleaner.cleanDuplicateFeeds();
Map<String, Long> result = Maps.newHashMap();
result.put("merged feeds", mergedCount);
return Response.ok(result).build();
}
}

View File

@@ -1213,9 +1213,11 @@ module.controller('ManageDuplicateFeedsCtrl', [
$scope.limit = 10;
$scope.page = 0;
$scope.minCount = 1;
$scope.mode = 'NORMALIZED_URL';
$scope.mergeData = {};
$scope.refreshData = function() {
AdminCleanupService.findDuplicateFeeds({
mode: $scope.mode,
limit : $scope.limit,
page : $scope.page,
minCount: $scope.minCount
@@ -1342,6 +1344,9 @@ function($scope, $location, $state, AdminSettingsService) {
$scope.toUsers = function() {
$state.transitionTo('admin.userlist');
};
$scope.toCleanup = function() {
$state.transitionTo('admin.duplicate_feeds');
};
}]);
module.controller('HelpController', [ '$scope', 'CategoryService',

View File

@@ -92,7 +92,7 @@ app.config([ '$routeProvider', '$stateProvider', '$urlRouterProvider', '$httpPro
controller : 'ManageUserCtrl'
});
$stateProvider.state('admin.duplicate_feeds', {
url : '/feeds/duplicates/',
url : '/feeds/duplicates',
templateUrl : 'templates/admin.duplicate_feeds.html',
controller : 'ManageDuplicateFeedsCtrl'
});

View File

@@ -1,10 +1,21 @@
<div class="row">
<div>
Limit <input type="number" ng-model="limit" />
Page <input type="number" ng-model="page" />
Min. count <input type="number" ng-model="minCount" />
<input type="button" class="btn" ng-click="refreshData()" value="Refresh" />
<input type="button" class="btn" ng-click="autoMerge()" value="Auto merge selected" />
</div>
<div>
Mode
<select ng-model="mode">
<option value="NORMALIZED_URL">Normalized URLs</option>
<option value="LAST_CONTENT">Last content</option>
<option value="PUSH_TOPIC">Pubsubhubbub topic URL</option>
</select>
<input type="button" class="btn" ng-click="refreshData()" value="Refresh" />
<input type="button" class="btn" ng-click="autoMerge()" value="Auto merge selected" />
</div>
<table class="table table-condensed table-hover" ui-if="counts">
<thead>
<tr>

View File

@@ -5,6 +5,10 @@
<small>
<a ng-click="toUsers()" class="pointer">Manage users</a>
</small>
-
<small>
<a ng-click="toCleanup()" class="pointer">Cleanup feeds</a>
</small>
</h1>
</div>