最大和
错误
#include<bits/stdc++.h> using namespace std; const int N = 1e6; int isprime[1000005] = {}; int d(int t)//最小质因数 { for (int i = 1; i <= N; i++)//最大就是自己本身 { isprime[i] = i; } for (int i = 3; i < sqrt(N); i+=2)//抓奇数 { for (int j = i * 2; j <= N; j += i) { if (isprime[j]>i)//使得i是最小的 { isprime[j] = i; } } } if (t % 2 == 0)//偶数最小因子一定是2 { return 2; } else { return isprime[t]; } } int main() { int n,ans=0,square[10001],dp[10001]; cin>>n; for(int i=1;i<=n;i++) cin>>square[i]; for(int i=1;i<=n;i++) { if(i+1<=n) { dp[i+1]=max(dp[i+1],dp[i]+square[i+1]); if(dp[i+1]>ans) { ans=dp[i+1]; } } if(i+d(n-i)<=n) { dp[i+d(n-i)]=max(dp[i+d(n-i)],dp[i]+square[i+d(n-i)]); if(dp[i+d(n-i)]>ans) { ans=dp[i+d(n-i)]; } } } cout<<ans; return 0; }
通过
#include <iostream> #include <cstdio> #include <string> #include <vector> #include <cstdlib> #include <cstring> #include <queue> #include <cmath> #include <algorithm> using namespace std; const int MIND = -2000000000; int n, a[10003]; int D[10003]; int f[10003]; void getD() { D[1] = 1; for (int i = 2; i <= 10000; i++) { int isfind = 0; for (int j = 2; j*j <= i; j++) { if (i % j == 0) { D[i] = j; // 最小质因数 isfind = 1; break; } } if (!isfind) { // is a prime D[i] = i; } } } void solve() { f[1] = a[1]; for (int i = 1; i <= n; i++) { int end = D[n-i]; for (int j = 1; j <= end; j++) { if (i + j > n) break; f[i+j] = max(f[i+j], f[i]+a[i+j]); } } cout << f[n]; } int main() { // codes here cin >> n; for (int i = 1; i <= n; i++) { cin >> a[i]; f[i] = MIND; } getD(); solve(); return 0; }